diff --git a/CHANGELOG.md b/CHANGELOG.md index 41e7d2e12c564..863de296812e3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add pointer based lag metric in pull-based ingestion ([#19635](https://github.com/opensearch-project/OpenSearch/pull/19635)) ### Changed +- Combining filter rewrite and skip list to optimize sub aggregation([#19573](https://github.com/opensearch-project/OpenSearch/pull/19573)) - Faster `terms` query creation for `keyword` field with index and docValues enabled ([#19350](https://github.com/opensearch-project/OpenSearch/pull/19350)) - Refactor to move prepareIndex and prepareDelete methods to Engine class ([#19551](https://github.com/opensearch-project/OpenSearch/pull/19551)) - Omit maxScoreCollector in SimpleTopDocsCollectorContext when concurrent segment search enabled ([#19584](https://github.com/opensearch-project/OpenSearch/pull/19584)) diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index e5caa7cb23666..195b16c63f9fe 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -1,6 +1,6 @@ [versions] opensearch = "3.4.0" -lucene = "10.3.1" +lucene = "10.4.0-snapshot-a8c1455" bundled_jdk_vendor = "adoptium" bundled_jdk = "24.0.2+12" diff --git a/libs/core/licenses/lucene-core-10.3.1.jar.sha1 b/libs/core/licenses/lucene-core-10.3.1.jar.sha1 deleted file mode 100644 index 2f8f5071a7a7b..0000000000000 --- a/libs/core/licenses/lucene-core-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b0ea7e448e7377bd71892d818635cf9546299f4a \ No newline at end of file diff --git a/libs/core/licenses/lucene-core-10.4.0-snapshot-a8c1455.jar.sha1 b/libs/core/licenses/lucene-core-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..f036d2163e0fa --- /dev/null +++ b/libs/core/licenses/lucene-core-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +96781fdefe4a7e2ac3482ccb41801b2cd1607501 \ No newline at end of file diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java index 51914bd4ce20b..0eb381b443a25 100644 --- a/libs/core/src/main/java/org/opensearch/Version.java +++ b/libs/core/src/main/java/org/opensearch/Version.java @@ -123,7 +123,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_3_2_0 = new Version(3020099, org.apache.lucene.util.Version.LUCENE_10_2_2); public static final Version V_3_3_0 = new Version(3030099, org.apache.lucene.util.Version.LUCENE_10_3_1); public static final Version V_3_3_1 = new Version(3030199, org.apache.lucene.util.Version.LUCENE_10_3_1); - public static final Version V_3_4_0 = new Version(3040099, org.apache.lucene.util.Version.LUCENE_10_3_1); + public static final Version V_3_4_0 = new Version(3040099, org.apache.lucene.util.Version.LUCENE_10_4_0); public static final Version CURRENT = V_3_4_0; public static Version fromId(int id) { diff --git a/modules/lang-expression/licenses/lucene-expressions-10.3.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-10.3.1.jar.sha1 deleted file mode 100644 index 8ea82a391b7e0..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -623dbba838d274b2801bcc4e65751af8e85fc74a \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-10.4.0-snapshot-a8c1455.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-10.4.0-snapshot-a8c1455.jar.sha1 new file 
mode 100644 index 0000000000000..a778f3734eaea --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +886e976447a505c02e636ba67b4d381b257a3f44 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-10.3.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-10.3.1.jar.sha1 deleted file mode 100644 index 0086401674932..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analysis-icu-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e8d40bfadd7810de290a0d20772c25b1a7cea23c \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-10.4.0-snapshot-a8c1455.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..5d055d9231497 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +2ad93f9924ecc4491b26dc540bef1fabe2afd2ea \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-10.3.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-10.3.1.jar.sha1 deleted file mode 100644 index 7c47caace3c70..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f02182aee7f0a95f0dc36c7814347863476648f1 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-10.4.0-snapshot-a8c1455.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..d534a31707557 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +6efc194346481fb1ca2a174182ad2429294021bb \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-10.3.1.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-10.3.1.jar.sha1 deleted file mode 100644 index 279be6d83f7d1..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analysis-nori-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -91175e30ea9e1ca94ca5029a004dc343c7ec97c8 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-10.4.0-snapshot-a8c1455.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..73745fbe6bd70 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +11a5ecc992d8725bfb760925f07fe13a40ce4b06 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-10.3.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-10.3.1.jar.sha1 deleted file mode 100644 index 81947c3843d84..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e77d452325d010f5d3239c27e3d4bc6c3f0eca0c \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-10.4.0-snapshot-a8c1455.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..9b08ef6f575e1 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +7821a0b53f5c3e25418659163968f60327484da1 \ No 
newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-10.3.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-10.3.1.jar.sha1 deleted file mode 100644 index 8818f63f06433..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f9b118a70bee80f9b2958658680a1686bb5bdd07 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-10.4.0-snapshot-a8c1455.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..f2b538a1608b4 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +6bff6a2e336dee93a4dff882138296d80ab62fd6 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-10.3.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-10.3.1.jar.sha1 deleted file mode 100644 index 18e25fe7f5c87..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -935fdb8970ce262a9eb87387f989e676fd0dc732 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-10.4.0-snapshot-a8c1455.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..6281cd2270a03 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +5d76ae18c4e5119845cd278ed276929505c88bfe \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-10.3.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-10.3.1.jar.sha1 deleted file mode 100644 index 70e059b88f9de..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -11727455ac9b8f2e9f83c7f28a4fcaec5a5b4d36 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-10.4.0-snapshot-a8c1455.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..60cb1191410a9 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +9513d1dcd7dd057a9026fa3c0764d778540d3b8d \ No newline at end of file diff --git a/server/licenses/lucene-analysis-common-10.3.1.jar.sha1 b/server/licenses/lucene-analysis-common-10.3.1.jar.sha1 deleted file mode 100644 index bfa5cc9d8e6ee..0000000000000 --- a/server/licenses/lucene-analysis-common-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9c60fcbb87908349527db7c4c19e069a7156fc3a \ No newline at end of file diff --git a/server/licenses/lucene-analysis-common-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-analysis-common-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..26ab2d0886948 --- /dev/null +++ b/server/licenses/lucene-analysis-common-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +7142df4d1076f734aabd863fc56516b58dcae605 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-10.3.1.jar.sha1 b/server/licenses/lucene-backward-codecs-10.3.1.jar.sha1 deleted file mode 100644 index 1376a74dc0f38..0000000000000 --- 
a/server/licenses/lucene-backward-codecs-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eff56ed4d97bcc57895404e6f702d82111028842 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-backward-codecs-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..3be0fd622f10d --- /dev/null +++ b/server/licenses/lucene-backward-codecs-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +3d9f4cf13f20e1b384021a808d8f57a73427287e \ No newline at end of file diff --git a/server/licenses/lucene-core-10.3.1.jar.sha1 b/server/licenses/lucene-core-10.3.1.jar.sha1 deleted file mode 100644 index 2f8f5071a7a7b..0000000000000 --- a/server/licenses/lucene-core-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b0ea7e448e7377bd71892d818635cf9546299f4a \ No newline at end of file diff --git a/server/licenses/lucene-core-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-core-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..f036d2163e0fa --- /dev/null +++ b/server/licenses/lucene-core-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +96781fdefe4a7e2ac3482ccb41801b2cd1607501 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-10.3.1.jar.sha1 b/server/licenses/lucene-grouping-10.3.1.jar.sha1 deleted file mode 100644 index 4038b2c403e5d..0000000000000 --- a/server/licenses/lucene-grouping-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -baa6b2891084d94e8c5a60c564a1beec860cdf9f \ No newline at end of file diff --git a/server/licenses/lucene-grouping-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-grouping-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..38b8beb22ee86 --- /dev/null +++ b/server/licenses/lucene-grouping-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +beba73c7a31f7f72d21a1485b200d864706638b3 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-10.3.1.jar.sha1 b/server/licenses/lucene-highlighter-10.3.1.jar.sha1 deleted file mode 100644 index 4ba4a98791335..0000000000000 --- a/server/licenses/lucene-highlighter-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -565fe5b07af59cb17d44665976f6de8ebf150080 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-highlighter-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..514916efe95fc --- /dev/null +++ b/server/licenses/lucene-highlighter-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +077fca639cabd72a1c8521e6e3e8aacc36d0b7eb \ No newline at end of file diff --git a/server/licenses/lucene-join-10.3.1.jar.sha1 b/server/licenses/lucene-join-10.3.1.jar.sha1 deleted file mode 100644 index 3b8ee261835e9..0000000000000 --- a/server/licenses/lucene-join-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3ebb9a7507f7ccb082079783115cb9deb3997a48 \ No newline at end of file diff --git a/server/licenses/lucene-join-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-join-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..fa2c3f28b08f9 --- /dev/null +++ b/server/licenses/lucene-join-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +79ab48828901f08ca79b74fd0d1de855a7ed8cda \ No newline at end of file diff --git a/server/licenses/lucene-memory-10.3.1.jar.sha1 b/server/licenses/lucene-memory-10.3.1.jar.sha1 deleted file mode 100644 index 7d3b12eef0206..0000000000000 --- a/server/licenses/lucene-memory-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-8e1a22b54460e96e0ba4d890727e6de61119e5af \ No newline at end of file diff --git a/server/licenses/lucene-memory-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-memory-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..88d6d7c6112cd --- /dev/null +++ b/server/licenses/lucene-memory-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +de41ae03a22b20fa52140f178ae62cad7724cf4e \ No newline at end of file diff --git a/server/licenses/lucene-misc-10.3.1.jar.sha1 b/server/licenses/lucene-misc-10.3.1.jar.sha1 deleted file mode 100644 index 50c21e2320ebe..0000000000000 --- a/server/licenses/lucene-misc-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3d278c37ed0467545aff3a26712a7666eb3d4de3 \ No newline at end of file diff --git a/server/licenses/lucene-misc-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-misc-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..657c12d7cc7ca --- /dev/null +++ b/server/licenses/lucene-misc-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +7ae8474b4a17d92394126f9c6cc8e76e479df388 \ No newline at end of file diff --git a/server/licenses/lucene-queries-10.3.1.jar.sha1 b/server/licenses/lucene-queries-10.3.1.jar.sha1 deleted file mode 100644 index ff7c68f70db6c..0000000000000 --- a/server/licenses/lucene-queries-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b4a2e8877832d92238ebd35ef1daec46513b8fc9 \ No newline at end of file diff --git a/server/licenses/lucene-queries-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-queries-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..36d81c37379ef --- /dev/null +++ b/server/licenses/lucene-queries-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +24e09399d8c70f5a6a0d72546cec522696bf4bac \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-10.3.1.jar.sha1 b/server/licenses/lucene-queryparser-10.3.1.jar.sha1 deleted file mode 100644 index c67c4bb553939..0000000000000 --- a/server/licenses/lucene-queryparser-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bd79873a43346c436beee173e9bf7c6a0d0def0c \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-queryparser-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..0e38a1e1de26f --- /dev/null +++ b/server/licenses/lucene-queryparser-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +86cf4d847c39a0081d23535be84bf36dbb6a262a \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-10.3.1.jar.sha1 b/server/licenses/lucene-sandbox-10.3.1.jar.sha1 deleted file mode 100644 index 66afe3dcd7958..0000000000000 --- a/server/licenses/lucene-sandbox-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -09d9101439f89cc1dd0312773e26ac797ba10cc4 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-sandbox-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..6395cabb47562 --- /dev/null +++ b/server/licenses/lucene-sandbox-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +c1078394a5db5510b3b8d508440d53be642fc1eb \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-10.3.1.jar.sha1 b/server/licenses/lucene-spatial-extras-10.3.1.jar.sha1 deleted file mode 100644 index 52b33a15cf9c3..0000000000000 --- a/server/licenses/lucene-spatial-extras-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2f9161056794551b67bcb7236e4964a954b919cb \ No newline at end of file diff --git 
a/server/licenses/lucene-spatial-extras-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-spatial-extras-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..75b101054b014 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +8b7925295d45c23b9fc087e9762afc74da578ae5 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-10.3.1.jar.sha1 b/server/licenses/lucene-spatial3d-10.3.1.jar.sha1 deleted file mode 100644 index 873138222eaf7..0000000000000 --- a/server/licenses/lucene-spatial3d-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -17253a087ede8755ff00623a65403e79d0bc555a \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-spatial3d-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..9023668aeb09c --- /dev/null +++ b/server/licenses/lucene-spatial3d-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +055860e2000dfc62982ccb1e2ba4ea8bf75acd8e \ No newline at end of file diff --git a/server/licenses/lucene-suggest-10.3.1.jar.sha1 b/server/licenses/lucene-suggest-10.3.1.jar.sha1 deleted file mode 100644 index a562c2826bad7..0000000000000 --- a/server/licenses/lucene-suggest-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fc3ca91714ea9c41482bf1076d73f5212d48e45b \ No newline at end of file diff --git a/server/licenses/lucene-suggest-10.4.0-snapshot-a8c1455.jar.sha1 b/server/licenses/lucene-suggest-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..572f527387b32 --- /dev/null +++ b/server/licenses/lucene-suggest-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +197e5f4d7f5dab0442c1865fce9dd941d7620022 \ No newline at end of file diff --git a/server/src/main/java/org/opensearch/common/lucene/Lucene.java b/server/src/main/java/org/opensearch/common/lucene/Lucene.java index 61530b13dfcf1..aadc214a610e7 100644 --- a/server/src/main/java/org/opensearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/opensearch/common/lucene/Lucene.java @@ -111,7 +111,7 @@ * @opensearch.internal */ public class Lucene { - public static final String LATEST_CODEC = "Lucene103"; + public static final String LATEST_CODEC = "Lucene104"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; public static final String PARENT_FIELD = "__nested_parent"; diff --git a/server/src/main/java/org/opensearch/index/codec/CodecService.java b/server/src/main/java/org/opensearch/index/codec/CodecService.java index 07e49f9d1e3ff..3657e90fd8dd3 100644 --- a/server/src/main/java/org/opensearch/index/codec/CodecService.java +++ b/server/src/main/java/org/opensearch/index/codec/CodecService.java @@ -34,7 +34,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.opensearch.common.Nullable; import org.opensearch.common.annotation.PublicApi; import org.opensearch.common.collect.MapBuilder; @@ -71,23 +71,23 @@ public CodecService(@Nullable MapperService mapperService, IndexSettings indexSe final MapBuilder codecs = MapBuilder.newMapBuilder(); assert null != indexSettings; if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene103Codec()); - codecs.put(LZ4, new Lucene103Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene103Codec(Lucene103Codec.Mode.BEST_COMPRESSION)); - codecs.put(ZLIB, new 
Lucene103Codec(Lucene103Codec.Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene104Codec()); + codecs.put(LZ4, new Lucene104Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene104Codec(Lucene104Codec.Mode.BEST_COMPRESSION)); + codecs.put(ZLIB, new Lucene104Codec(Lucene104Codec.Mode.BEST_COMPRESSION)); } else { // CompositeCodec still delegates to PerFieldMappingPostingFormatCodec // We can still support all the compression codecs when composite index is present if (mapperService.isCompositeIndexPresent()) { codecs.putAll(compositeCodecFactory.getCompositeIndexCodecs(mapperService, logger)); } else { - codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Lucene103Codec.Mode.BEST_SPEED, mapperService, logger)); - codecs.put(LZ4, new PerFieldMappingPostingFormatCodec(Lucene103Codec.Mode.BEST_SPEED, mapperService, logger)); + codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Lucene104Codec.Mode.BEST_SPEED, mapperService, logger)); + codecs.put(LZ4, new PerFieldMappingPostingFormatCodec(Lucene104Codec.Mode.BEST_SPEED, mapperService, logger)); codecs.put( BEST_COMPRESSION_CODEC, - new PerFieldMappingPostingFormatCodec(Lucene103Codec.Mode.BEST_COMPRESSION, mapperService, logger) + new PerFieldMappingPostingFormatCodec(Lucene104Codec.Mode.BEST_COMPRESSION, mapperService, logger) ); - codecs.put(ZLIB, new PerFieldMappingPostingFormatCodec(Lucene103Codec.Mode.BEST_COMPRESSION, mapperService, logger)); + codecs.put(ZLIB, new PerFieldMappingPostingFormatCodec(Lucene104Codec.Mode.BEST_COMPRESSION, mapperService, logger)); } } codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); diff --git a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java index b2f288f03d5f6..ee2d2398af277 100644 --- a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -36,7 +36,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.opensearch.common.lucene.Lucene; import org.opensearch.index.codec.fuzzy.FuzzyFilterPostingsFormat; @@ -59,7 +59,7 @@ * * @opensearch.internal */ -public class PerFieldMappingPostingFormatCodec extends Lucene103Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene104Codec { private final Logger logger; private final MapperService mapperService; private final DocValuesFormat dvFormat = new Lucene90DocValuesFormat(); diff --git a/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java b/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java index 88c9c9b9de111..5c5e88edbb6eb 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java @@ -10,10 +10,9 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.opensearch.common.annotation.ExperimentalApi; -import 
org.opensearch.index.codec.composite.backward_codecs.composite101.Composite101Codec; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.mapper.MapperService; import java.util.HashMap; @@ -33,16 +32,16 @@ public class CompositeCodecFactory { // we can use this to track the latest composite codec - public static final String COMPOSITE_CODEC = Composite101Codec.COMPOSITE_INDEX_CODEC_NAME; + public static final String COMPOSITE_CODEC = Composite104Codec.COMPOSITE_INDEX_CODEC_NAME; public CompositeCodecFactory() {} public Map getCompositeIndexCodecs(MapperService mapperService, Logger logger) { Map codecs = new HashMap<>(); - codecs.put(DEFAULT_CODEC, new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, logger)); - codecs.put(LZ4, new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, logger)); - codecs.put(BEST_COMPRESSION_CODEC, new Composite103Codec(Lucene103Codec.Mode.BEST_COMPRESSION, mapperService, logger)); - codecs.put(ZLIB, new Composite103Codec(Lucene103Codec.Mode.BEST_COMPRESSION, mapperService, logger)); + codecs.put(DEFAULT_CODEC, new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, logger)); + codecs.put(LZ4, new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, logger)); + codecs.put(BEST_COMPRESSION_CODEC, new Composite104Codec(Lucene104Codec.Mode.BEST_COMPRESSION, mapperService, logger)); + codecs.put(ZLIB, new Composite104Codec(Lucene104Codec.Mode.BEST_COMPRESSION, mapperService, logger)); return codecs; } } diff --git a/server/src/main/java/org/opensearch/index/codec/composite/backward_codecs/composite101/package-info.java b/server/src/main/java/org/opensearch/index/codec/composite/backward_codecs/composite101/package-info.java index c35f209cf89e0..0205fabe5d73b 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/backward_codecs/composite101/package-info.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/backward_codecs/composite101/package-info.java @@ -7,6 +7,6 @@ */ /** - * Responsible for handling all composite index codecs and operations associated with Composite100 codec + * Responsible for handling all composite index codecs and operations associated with Composite101 codec */ package org.opensearch.index.codec.composite.backward_codecs.composite101; diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite103/Composite103Codec.java b/server/src/main/java/org/opensearch/index/codec/composite/backward_codecs/composite103/Composite103Codec.java similarity index 77% rename from server/src/main/java/org/opensearch/index/codec/composite/composite103/Composite103Codec.java rename to server/src/main/java/org/opensearch/index/codec/composite/backward_codecs/composite103/Composite103Codec.java index 043478cc6b34b..2c7ad01a4096c 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite103/Composite103Codec.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/backward_codecs/composite103/Composite103Codec.java @@ -6,15 +6,13 @@ * compatible open source license. 
*/ -package org.opensearch.index.codec.composite.composite103; +package org.opensearch.index.codec.composite.backward_codecs.composite103; -import org.apache.logging.log4j.Logger; +import org.apache.lucene.backward_codecs.lucene103.Lucene103Codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; import org.opensearch.common.annotation.ExperimentalApi; -import org.opensearch.index.codec.PerFieldMappingPostingFormatCodec; import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat; import org.opensearch.index.mapper.MapperService; @@ -34,10 +32,6 @@ public Composite103Codec() { this(COMPOSITE_INDEX_CODEC_NAME, new Lucene103Codec(), null); } - public Composite103Codec(Lucene103Codec.Mode compressionMode, MapperService mapperService, Logger logger) { - this(COMPOSITE_INDEX_CODEC_NAME, new PerFieldMappingPostingFormatCodec(compressionMode, mapperService, logger), mapperService); - } - /** * Sole constructor. When subclassing this codec, create a no-arg ctor and pass the delegate codec and a unique name to * this ctor. diff --git a/server/src/main/java/org/opensearch/index/codec/composite/backward_codecs/composite103/package-info.java b/server/src/main/java/org/opensearch/index/codec/composite/backward_codecs/composite103/package-info.java new file mode 100644 index 0000000000000..d819de7fd6346 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/composite/backward_codecs/composite103/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Responsible for handling all composite index codecs and operations associated with Composite103 codec + */ +package org.opensearch.index.codec.composite.backward_codecs.composite103; diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite104/Composite104Codec.java b/server/src/main/java/org/opensearch/index/codec/composite/composite104/Composite104Codec.java new file mode 100644 index 0000000000000..a759996bff3c6 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite104/Composite104Codec.java @@ -0,0 +1,58 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.composite.composite104; + +import org.apache.logging.log4j.Logger; +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.index.codec.PerFieldMappingPostingFormatCodec; +import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat; +import org.opensearch.index.mapper.MapperService; + +/** + * Extends the Codec to support new file formats for composite indices eg: star tree index + * based on the mappings. 
+ * + * @opensearch.experimental + */ +@ExperimentalApi +public class Composite104Codec extends FilterCodec { + public static final String COMPOSITE_INDEX_CODEC_NAME = "Composite104Codec"; + private final MapperService mapperService; + + // needed for SPI - this is used in reader path + public Composite104Codec() { + this(COMPOSITE_INDEX_CODEC_NAME, new Lucene104Codec(), null); + } + + public Composite104Codec(Lucene104Codec.Mode compressionMode, MapperService mapperService, Logger logger) { + this(COMPOSITE_INDEX_CODEC_NAME, new PerFieldMappingPostingFormatCodec(compressionMode, mapperService, logger), mapperService); + } + + /** + * Sole constructor. When subclassing this codec, create a no-arg ctor and pass the delegate codec and a unique name to + * this ctor. + * + * @param name name of the codec + * @param delegate codec delegate + * @param mapperService mapper service instance + */ + protected Composite104Codec(String name, Codec delegate, MapperService mapperService) { + super(name, delegate); + this.mapperService = mapperService; + } + + @Override + public DocValuesFormat docValuesFormat() { + return new Composite912DocValuesFormat(mapperService); + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite103/package-info.java b/server/src/main/java/org/opensearch/index/codec/composite/composite104/package-info.java similarity index 73% rename from server/src/main/java/org/opensearch/index/codec/composite/composite103/package-info.java rename to server/src/main/java/org/opensearch/index/codec/composite/composite104/package-info.java index 424609b521fd1..970c7fe85e9b0 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite103/package-info.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite104/package-info.java @@ -7,6 +7,6 @@ */ /** - * Responsible for handling all composite index codecs and operations associated with Composite100 codec + * Responsible for handling all composite index codecs and operations associated with Composite104 codec */ -package org.opensearch.index.codec.composite.composite103; +package org.opensearch.index.codec.composite.composite104; diff --git a/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java index a78d181e362b2..246aa2e803954 100644 --- a/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; -import org.apache.lucene.search.suggest.document.Completion101PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion104PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.CompletionQuery; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -330,7 +330,7 @@ public ContextMappings getContextMappings() { */ public static synchronized PostingsFormat postingsFormat() { if (postingsFormat == null) { - postingsFormat = new Completion101PostingsFormat(); + postingsFormat = new Completion104PostingsFormat(); } return postingsFormat; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/BitSetDocIdStream.java b/server/src/main/java/org/opensearch/search/aggregations/BitSetDocIdStream.java new file mode 
100644 index 0000000000000..a33b1dbf79180 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/aggregations/BitSetDocIdStream.java @@ -0,0 +1,76 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.aggregations; + +import org.apache.lucene.search.CheckedIntConsumer; +import org.apache.lucene.search.DocIdStream; +import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.MathUtil; + +import java.io.IOException; + +/** + * DocIdStream implementation using FixedBitSet. This is duplicate of the implementation in Lucene + * and should ideally eventually be removed. + * + * @opensearch.internal + */ +public final class BitSetDocIdStream extends DocIdStream { + + private final FixedBitSet bitSet; + private final int offset, max; + private int upTo; + + public BitSetDocIdStream(FixedBitSet bitSet, int offset) { + this.bitSet = bitSet; + this.offset = offset; + upTo = offset; + max = MathUtil.unsignedMin(Integer.MAX_VALUE, offset + bitSet.length()); + } + + @Override + public boolean mayHaveRemaining() { + return upTo < max; + } + + @Override + public void forEach(int upTo, CheckedIntConsumer consumer) throws IOException { + if (upTo > this.upTo) { + upTo = Math.min(upTo, max); + bitSet.forEach(this.upTo - offset, upTo - offset, offset, consumer); + this.upTo = upTo; + } + } + + @Override + public int count(int upTo) throws IOException { + if (upTo > this.upTo) { + upTo = Math.min(upTo, max); + int count = bitSet.cardinality(this.upTo - offset, upTo - offset); + this.upTo = upTo; + return count; + } else { + return 0; + } + } + + @Override + public int intoArray(int upTo, int[] array) { + if (upTo > this.upTo) { + upTo = Math.min(upTo, max); + int count = bitSet.intoArray(this.upTo - offset, upTo - offset, offset, array); + if (count == array.length) { // The whole range of doc IDs may not have been copied + upTo = array[array.length - 1] + 1; + } + this.upTo = upTo; + return count; + } + return 0; + } +} diff --git a/server/src/main/java/org/opensearch/search/aggregations/LeafBucketCollector.java b/server/src/main/java/org/opensearch/search/aggregations/LeafBucketCollector.java index 0b34ffc78853a..4b7e4d36ce937 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/LeafBucketCollector.java +++ b/server/src/main/java/org/opensearch/search/aggregations/LeafBucketCollector.java @@ -32,8 +32,10 @@ package org.opensearch.search.aggregations; +import org.apache.lucene.search.DocIdStream; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Scorable; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.search.aggregations.bucket.terms.LongKeyedBucketOrds; import java.io.IOException; @@ -123,6 +125,34 @@ public void collect(int doc) throws IOException { collect(doc, 0); } + /** + * Bulk-collect doc IDs within owningBucketOrd. + * + *

<p>Note: The provided {@link DocIdStream} may be reused across calls and should be consumed + * immediately. + * + * <p>Note: The provided {@link DocIdStream} typically holds all the docIds for the corresponding + * owningBucketOrd. This method may be called multiple times per segment (but once per owningBucketOrd). + * + * <p>While the {@link DocIdStream} for each owningBucketOrd is sorted by docIds, it is NOT GUARANTEED + * that doc IDs arrive in order across invocations for different owningBucketOrd. + * + * <p>It is NOT LEGAL for callers to mix calls to {@link #collect(DocIdStream, long)} and {@link + * #collect(int, long)}. + * + * <p>
The default implementation calls {@code stream.forEach(doc -> collect(doc, owningBucketOrd))}. + */ + @ExperimentalApi + public void collect(DocIdStream stream, long owningBucketOrd) throws IOException { + stream.forEach(doc -> collect(doc, owningBucketOrd)); + } + + public void collect(int[] docIds, long owningBucketOrd) throws IOException { + for (int doc : docIds) { + collect(doc, owningBucketOrd); + } + } + @Override public void setScorer(Scorable scorer) throws IOException { // no-op by default diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/HistogramSkiplistLeafCollector.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/HistogramSkiplistLeafCollector.java new file mode 100644 index 0000000000000..2b51940ae54c1 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/HistogramSkiplistLeafCollector.java @@ -0,0 +1,171 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.aggregations.bucket; + +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.search.DocIdStream; +import org.apache.lucene.search.Scorable; +import org.opensearch.common.Rounding; +import org.opensearch.search.aggregations.LeafBucketCollector; +import org.opensearch.search.aggregations.bucket.terms.LongKeyedBucketOrds; + +import java.io.IOException; + +/** + * Histogram collection logic using skip list. + * + * @opensearch.internal + */ +public class HistogramSkiplistLeafCollector extends LeafBucketCollector { + + private final NumericDocValues values; + private final DocValuesSkipper skipper; + private final Rounding.Prepared preparedRounding; + private final LongKeyedBucketOrds bucketOrds; + private final LeafBucketCollector sub; + private final BucketsAggregator aggregator; + + /** + * Max doc ID (inclusive) up to which all docs values may map to the same bucket. + */ + private int upToInclusive = -1; + + /** + * Whether all docs up to {@link #upToInclusive} values map to the same bucket. + */ + private boolean upToSameBucket; + + /** + * Index in bucketOrds for docs up to {@link #upToInclusive}. + */ + private long upToBucketIndex; + + public HistogramSkiplistLeafCollector( + NumericDocValues values, + DocValuesSkipper skipper, + Rounding.Prepared preparedRounding, + LongKeyedBucketOrds bucketOrds, + LeafBucketCollector sub, + BucketsAggregator aggregator + ) { + this.values = values; + this.skipper = skipper; + this.preparedRounding = preparedRounding; + this.bucketOrds = bucketOrds; + this.sub = sub; + this.aggregator = aggregator; + } + + @Override + public void setScorer(Scorable scorer) throws IOException { + if (sub != null) { + sub.setScorer(scorer); + } + } + + private void advanceSkipper(int doc, long owningBucketOrd) throws IOException { + if (doc > skipper.maxDocID(0)) { + skipper.advance(doc); + } + upToSameBucket = false; + + if (skipper.minDocID(0) > doc) { + // Corner case which happens if `doc` doesn't have a value and is between two intervals of + // the doc-value skip index. + upToInclusive = skipper.minDocID(0) - 1; + return; + } + + upToInclusive = skipper.maxDocID(0); + + // Now find the highest level where all docs map to the same bucket. 
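As a toy illustration (not part of the patch): the level loop that follows checks whether a whole skip-list block can be attributed to a single histogram bucket, which holds when every doc in the block carries a value and the block's min/max values round to the same bucket. The class, the hourly rounding helper, and the numbers below are made up for illustration only.

```java
// Toy stand-in for the per-level check; values and the hourly rounding are hypothetical.
public final class LevelCheckDemo {
    // Stand-in for Rounding.Prepared#round with a fixed one-hour interval (milliseconds).
    static long roundToHour(long millis) {
        return Math.floorDiv(millis, 3_600_000L) * 3_600_000L;
    }

    public static void main(String[] args) {
        int minDocID = 1_000, maxDocID = 9_191;        // block boundaries reported by the skipper
        int docCount = 8_192;                           // docs in the block that carry a value
        long minValue = 10 * 3_600_000L + 180_000L;     // 10:03
        long maxValue = 10 * 3_600_000L + 3_480_000L;   // 10:58

        boolean allDocsHaveValue = docCount == maxDocID - minDocID + 1;
        boolean singleBucket = roundToHour(minValue) == roundToHour(maxValue);
        // When both hold, the collector can count the whole block against one bucket
        // ordinal instead of reading doc values for each of the 8_192 documents.
        System.out.println(allDocsHaveValue && singleBucket); // true
    }
}
```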
+ for (int level = 0; level < skipper.numLevels(); ++level) { + int totalDocsAtLevel = skipper.maxDocID(level) - skipper.minDocID(level) + 1; + long minBucket = preparedRounding.round(skipper.minValue(level)); + long maxBucket = preparedRounding.round(skipper.maxValue(level)); + + if (skipper.docCount(level) == totalDocsAtLevel && minBucket == maxBucket) { + // All docs at this level have a value, and all values map to the same bucket. + upToInclusive = skipper.maxDocID(level); + upToSameBucket = true; + upToBucketIndex = bucketOrds.add(owningBucketOrd, maxBucket); + if (upToBucketIndex < 0) { + upToBucketIndex = -1 - upToBucketIndex; + } + } else { + break; + } + } + } + + @Override + public void collect(int doc, long owningBucketOrd) throws IOException { + if (doc > upToInclusive) { + advanceSkipper(doc, owningBucketOrd); + } + + if (upToSameBucket) { + aggregator.incrementBucketDocCount(upToBucketIndex, 1L); + sub.collect(doc, upToBucketIndex); + } else if (values.advanceExact(doc)) { + final long value = values.longValue(); + long bucketIndex = bucketOrds.add(owningBucketOrd, preparedRounding.round(value)); + if (bucketIndex < 0) { + bucketIndex = -1 - bucketIndex; + aggregator.collectExistingBucket(sub, doc, bucketIndex); + } else { + aggregator.collectBucket(sub, doc, bucketIndex); + } + } + } + + @Override + public void collect(DocIdStream stream) throws IOException { + // This will only be called if its the top agg + collect(stream, 0); + } + + @Override + public void collect(DocIdStream stream, long owningBucketOrd) throws IOException { + // This will only be called if its the sub aggregation + for (;;) { + int upToExclusive = upToInclusive + 1; + if (upToExclusive < 0) { // overflow + upToExclusive = Integer.MAX_VALUE; + } + + if (upToSameBucket) { + if (sub == NO_OP_COLLECTOR) { + // stream.count maybe faster when we don't need to handle sub-aggs + long count = stream.count(upToExclusive); + aggregator.incrementBucketDocCount(upToBucketIndex, count); + } else { + int count = 0; + int[] docBuffer = new int[64]; + int cnt = Integer.MAX_VALUE; + while (cnt != 0) { + cnt = stream.intoArray(upToExclusive, docBuffer); + sub.collect(docBuffer, upToBucketIndex); + count += cnt; + } + aggregator.incrementBucketDocCount(upToBucketIndex, count); + } + } else { + stream.forEach(upToExclusive, doc -> collect(doc, owningBucketOrd)); + } + + if (stream.mayHaveRemaining()) { + advanceSkipper(upToExclusive, owningBucketOrd); + } else { + break; + } + } + } +} diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/filterrewrite/rangecollector/SubAggRangeCollector.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/filterrewrite/rangecollector/SubAggRangeCollector.java index 5c1f21b22e646..b0be76cf645c4 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/filterrewrite/rangecollector/SubAggRangeCollector.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/filterrewrite/rangecollector/SubAggRangeCollector.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.FixedBitSet; +import org.opensearch.search.aggregations.BitSetDocIdStream; import org.opensearch.search.aggregations.BucketCollector; import org.opensearch.search.aggregations.LeafBucketCollector; import org.opensearch.search.aggregations.bucket.filterrewrite.FilterRewriteOptimizationContext; @@ -23,8 +24,6 @@ import java.util.function.BiConsumer; import 
java.util.function.Function; -import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; - /** * Range collector implementation that supports sub-aggregations by collecting doc IDs. */ @@ -85,10 +84,7 @@ public void finalizePreviousRange() { DocIdSetIterator iterator = bitDocIdSet.iterator(); // build a new leaf collector for each bucket LeafBucketCollector sub = collectableSubAggregators.getLeafCollector(leafCtx); - while (iterator.nextDoc() != NO_MORE_DOCS) { - int currentDoc = iterator.docID(); - sub.collect(currentDoc, bucketOrd); - } + sub.collect(new BitSetDocIdStream(bitSet, 0), bucketOrd); logger.trace("collected sub aggregation for bucket {}", bucketOrd); } catch (IOException e) { throw new RuntimeException(e); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index 8fa9c61821fd8..7aa0fead4162f 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -31,7 +31,10 @@ package org.opensearch.search.aggregations.bucket.histogram; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; @@ -51,6 +54,7 @@ import org.opensearch.search.aggregations.LeafBucketCollectorBase; import org.opensearch.search.aggregations.bucket.DeferableBucketAggregator; import org.opensearch.search.aggregations.bucket.DeferringBucketCollector; +import org.opensearch.search.aggregations.bucket.HistogramSkiplistLeafCollector; import org.opensearch.search.aggregations.bucket.MergingBucketsDeferringCollector; import org.opensearch.search.aggregations.bucket.filterrewrite.DateHistogramAggregatorBridge; import org.opensearch.search.aggregations.bucket.filterrewrite.FilterRewriteOptimizationContext; @@ -135,6 +139,7 @@ static AutoDateHistogramAggregator build( protected int roundingIdx; protected Rounding.Prepared preparedRounding; + private final String fieldName; private final FilterRewriteOptimizationContext filterRewriteOptimizationContext; private AutoDateHistogramAggregator( @@ -218,6 +223,10 @@ protected Function bucketOrdProducer() { return (key) -> getBucketOrds().add(0, preparedRounding.round(key)); } }; + + this.fieldName = (valuesSource instanceof ValuesSource.Numeric.FieldData) + ? 
((ValuesSource.Numeric.FieldData) valuesSource).getIndexFieldName() + : null; filterRewriteOptimizationContext = new FilterRewriteOptimizationContext(bridge, parent, subAggregators.length, context); } @@ -260,7 +269,21 @@ public final LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBuc return LeafBucketCollector.NO_OP_COLLECTOR; } + DocValuesSkipper skipper = null; + if (this.fieldName != null) { + skipper = ctx.reader().getDocValuesSkipper(this.fieldName); + } final SortedNumericDocValues values = valuesSource.longValues(ctx); + final NumericDocValues singleton = DocValues.unwrapSingleton(values); + + if (skipper != null && singleton != null) { + // TODO: add hard bounds support + // TODO: SkipListLeafCollector should be used if the getLeafCollector invocation is from + // filterRewriteOptimizationContext when parent != null. Removing the check to collect + // performance numbers for now + return new HistogramSkiplistLeafCollector(singleton, skipper, preparedRounding, getBucketOrds(), sub, this); + } + final LeafBucketCollector iteratingCollector = getLeafCollector(values, sub); return new LeafBucketCollectorBase(sub, values) { @Override diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index 7ba939f64dbbf..7d785dec6d6a1 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -38,8 +38,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.search.DocIdStream; -import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; import org.opensearch.common.Nullable; @@ -63,6 +61,7 @@ import org.opensearch.search.aggregations.StarTreeBucketCollector; import org.opensearch.search.aggregations.StarTreePreComputeCollector; import org.opensearch.search.aggregations.bucket.BucketsAggregator; +import org.opensearch.search.aggregations.bucket.HistogramSkiplistLeafCollector; import org.opensearch.search.aggregations.bucket.filterrewrite.DateHistogramAggregatorBridge; import org.opensearch.search.aggregations.bucket.filterrewrite.FilterRewriteOptimizationContext; import org.opensearch.search.aggregations.bucket.terms.LongKeyedBucketOrds; @@ -234,7 +233,10 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCol if (skipper != null && singleton != null) { // TODO: add hard bounds support - if (hardBounds == null && parent == null) { + // TODO: SkipListLeafCollector should be used if the getLeafCollector invocation is from + // filterRewriteOptimizationContext when parent != null. 
Removing the check to collect + // performance numbers for now + if (hardBounds == null) { skipListCollectorsUsed++; return new HistogramSkiplistLeafCollector(singleton, skipper, preparedRounding, bucketOrds, sub, this); } @@ -434,149 +436,4 @@ public double bucketSize(long bucket, Rounding.DateTimeUnit unitSize) { return 1.0; } } - - private static class HistogramSkiplistLeafCollector extends LeafBucketCollector { - - private final NumericDocValues values; - private final DocValuesSkipper skipper; - private final Rounding.Prepared preparedRounding; - private final LongKeyedBucketOrds bucketOrds; - private final LeafBucketCollector sub; - private final BucketsAggregator aggregator; - - /** - * Max doc ID (inclusive) up to which all docs values may map to the same bucket. - */ - private int upToInclusive = -1; - - /** - * Whether all docs up to {@link #upToInclusive} values map to the same bucket. - */ - private boolean upToSameBucket; - - /** - * Index in bucketOrds for docs up to {@link #upToInclusive}. - */ - private long upToBucketIndex; - - HistogramSkiplistLeafCollector( - NumericDocValues values, - DocValuesSkipper skipper, - Rounding.Prepared preparedRounding, - LongKeyedBucketOrds bucketOrds, - LeafBucketCollector sub, - BucketsAggregator aggregator - ) { - this.values = values; - this.skipper = skipper; - this.preparedRounding = preparedRounding; - this.bucketOrds = bucketOrds; - this.sub = sub; - this.aggregator = aggregator; - } - - @Override - public void setScorer(Scorable scorer) throws IOException { - if (sub != null) { - sub.setScorer(scorer); - } - } - - private void advanceSkipper(int doc, long owningBucketOrd) throws IOException { - if (doc > skipper.maxDocID(0)) { - skipper.advance(doc); - } - upToSameBucket = false; - - if (skipper.minDocID(0) > doc) { - // Corner case which happens if `doc` doesn't have a value and is between two intervals of - // the doc-value skip index. - upToInclusive = skipper.minDocID(0) - 1; - return; - } - - upToInclusive = skipper.maxDocID(0); - - // Now find the highest level where all docs map to the same bucket. - for (int level = 0; level < skipper.numLevels(); ++level) { - int totalDocsAtLevel = skipper.maxDocID(level) - skipper.minDocID(level) + 1; - long minBucket = preparedRounding.round(skipper.minValue(level)); - long maxBucket = preparedRounding.round(skipper.maxValue(level)); - - if (skipper.docCount(level) == totalDocsAtLevel && minBucket == maxBucket) { - // All docs at this level have a value, and all values map to the same bucket. 
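For readers unfamiliar with the `bucketOrds.add(...)` return convention used throughout these collectors (a negative value signals the key already had an ordinal, recovered as `-1 - result`), here is a minimal, self-contained sketch of that encoding. `BucketOrdsSketch` is a hypothetical stand-in, not the real `LongKeyedBucketOrds`, and it ignores the owning bucket ordinal.

```java
import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-in for LongKeyedBucketOrds#add, showing only the return-value encoding.
public final class BucketOrdsSketch {
    private final Map<Long, Long> ords = new HashMap<>();

    long add(long key) {
        Long existing = ords.get(key);
        if (existing != null) {
            return -1 - existing;   // already present: caller decodes the ordinal as -1 - result
        }
        long ord = ords.size();
        ords.put(key, ord);
        return ord;                 // first occurrence: a fresh, non-negative ordinal
    }

    public static void main(String[] args) {
        BucketOrdsSketch ords = new BucketOrdsSketch();
        System.out.println(ords.add(36_000_000L));   // 0  -> new bucket
        long encoded = ords.add(36_000_000L);        // -1 -> key already has ordinal 0
        System.out.println(-1 - encoded);            // 0  -> decoded existing ordinal
    }
}
```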
- upToInclusive = skipper.maxDocID(level); - upToSameBucket = true; - upToBucketIndex = bucketOrds.add(owningBucketOrd, maxBucket); - if (upToBucketIndex < 0) { - upToBucketIndex = -1 - upToBucketIndex; - } - } else { - break; - } - } - } - - @Override - public void collect(int doc, long owningBucketOrd) throws IOException { - if (doc > upToInclusive) { - advanceSkipper(doc, owningBucketOrd); - } - - if (upToSameBucket) { - aggregator.incrementBucketDocCount(upToBucketIndex, 1L); - sub.collect(doc, upToBucketIndex); - } else if (values.advanceExact(doc)) { - final long value = values.longValue(); - long bucketIndex = bucketOrds.add(owningBucketOrd, preparedRounding.round(value)); - if (bucketIndex < 0) { - bucketIndex = -1 - bucketIndex; - aggregator.collectExistingBucket(sub, doc, bucketIndex); - } else { - aggregator.collectBucket(sub, doc, bucketIndex); - } - } - } - - @Override - public void collect(int doc) throws IOException { - collect(doc, 0); - } - - @Override - public void collect(DocIdStream stream) throws IOException { - // This will only be called if its the top agg - for (;;) { - int upToExclusive = upToInclusive + 1; - if (upToExclusive < 0) { // overflow - upToExclusive = Integer.MAX_VALUE; - } - - if (upToSameBucket) { - if (sub == NO_OP_COLLECTOR) { - // stream.count maybe faster when we don't need to handle sub-aggs - long count = stream.count(upToExclusive); - aggregator.incrementBucketDocCount(upToBucketIndex, count); - } else { - final int[] count = { 0 }; - stream.forEach(upToExclusive, doc -> { - sub.collect(doc, upToBucketIndex); - count[0]++; - }); - aggregator.incrementBucketDocCount(upToBucketIndex, count[0]); - } - - } else { - stream.forEach(upToExclusive, this::collect); - } - - if (stream.mayHaveRemaining()) { - advanceSkipper(upToExclusive, 0); - } else { - break; - } - } - } - - } } diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/AvgAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/AvgAggregator.java index 5f99a9cc05558..7d36620eb8c08 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/AvgAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/AvgAggregator.java @@ -131,30 +131,39 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBuc final CompensatedSum kahanSummation = new CompensatedSum(0, 0); return new LeafBucketCollectorBase(sub, values) { + int[] docBuffer = new int[1]; + @Override public void collect(int doc, long bucket) throws IOException { + docBuffer[0] = doc; + collect(docBuffer, bucket); + } + + @Override + public void collect(int[] docBuffer, long bucket) throws IOException { counts = bigArrays.grow(counts, bucket + 1); sums = bigArrays.grow(sums, bucket + 1); compensations = bigArrays.grow(compensations, bucket + 1); - if (values.advanceExact(doc)) { - final int valueCount = values.docValueCount(); - counts.increment(bucket, valueCount); - // Compute the sum of double values with Kahan summation algorithm which is more - // accurate than naive summation. 
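Stepping back from this hunk for a moment: the `BitSetDocIdStream` added earlier in the patch is what feeds the bulk `collect(DocIdStream, long)` path. A small usage sketch of its semantics follows (set bits become doc IDs once the offset is applied; `count` and `forEach` consume the stream incrementally). The bit positions and the offset of 100 are arbitrary example values.

```java
import org.apache.lucene.util.FixedBitSet;
import org.opensearch.search.aggregations.BitSetDocIdStream;

// Usage sketch only; the bit set contents and the offset are made up.
public final class DocIdStreamDemo {
    public static void main(String[] args) throws Exception {
        FixedBitSet bits = new FixedBitSet(8);
        bits.set(1);
        bits.set(3);
        bits.set(6);

        // Set bits become doc IDs 101, 103 and 106 once the offset is applied.
        BitSetDocIdStream stream = new BitSetDocIdStream(bits, 100);

        System.out.println(stream.count(104));                    // 2 -> docs 101 and 103
        stream.forEach(Integer.MAX_VALUE, System.out::println);   // prints 106
        System.out.println(stream.mayHaveRemaining());            // false, fully consumed
    }
}
```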
- double sum = sums.get(bucket); - double compensation = compensations.get(bucket); - - kahanSummation.reset(sum, compensation); - - for (int i = 0; i < valueCount; i++) { - double value = values.nextValue(); - kahanSummation.add(value); + double sum = sums.get(bucket); + double compensation = compensations.get(bucket); + kahanSummation.reset(sum, compensation); + int count = 0; + for (int doc : docBuffer) { + if (values.advanceExact(doc)) { + final int valueCount = values.docValueCount(); + count += valueCount; + // Compute the sum of double values with Kahan summation algorithm which is more + // accurate than naive summation. + for (int i = 0; i < valueCount; i++) { + double value = values.nextValue(); + kahanSummation.add(value); + } } - - sums.set(bucket, kahanSummation.value()); - compensations.set(bucket, kahanSummation.delta()); } + counts.increment(bucket, count); + sums.set(bucket, kahanSummation.value()); + compensations.set(bucket, kahanSummation.delta()); } }; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java index fbba20d8a6d7d..9f7cbfd4c9ff2 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java @@ -156,22 +156,30 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBuc final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx); final NumericDoubleValues values = MultiValueMode.MAX.select(allValues); return new LeafBucketCollectorBase(sub, allValues) { + int[] docBuffer = new int[1]; @Override public void collect(int doc, long bucket) throws IOException { + docBuffer[0] = doc; + collect(docBuffer, bucket); + } + + @Override + public void collect(int[] docBuffer, long bucket) throws IOException { if (bucket >= maxes.size()) { long from = maxes.size(); maxes = bigArrays.grow(maxes, bucket + 1); maxes.fill(from, maxes.size(), Double.NEGATIVE_INFINITY); } - if (values.advanceExact(doc)) { - final double value = values.doubleValue(); - double max = maxes.get(bucket); - max = Math.max(max, value); - maxes.set(bucket, max); + + double max = maxes.get(bucket); + for (int doc : docBuffer) { + if (values.advanceExact(doc)) { + max = Math.max(max, values.doubleValue()); + } } + maxes.set(bucket, max); } - }; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/MinAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/MinAggregator.java index 5c2ed2b240a09..304b91eca5fe3 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/MinAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/MinAggregator.java @@ -156,20 +156,29 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBuc final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx); final NumericDoubleValues values = MultiValueMode.MIN.select(allValues); return new LeafBucketCollectorBase(sub, allValues) { + int[] docBuffer = new int[1]; @Override public void collect(int doc, long bucket) throws IOException { + docBuffer[0] = doc; + collect(docBuffer, bucket); + } + + @Override + public void collect(int[] docBuffer, long bucket) throws IOException { if (bucket >= mins.size()) { long from = mins.size(); mins = bigArrays.grow(mins, bucket + 1); mins.fill(from, mins.size(), 
Double.POSITIVE_INFINITY); } - if (values.advanceExact(doc)) { - final double value = values.doubleValue(); - double min = mins.get(bucket); - min = Math.min(min, value); - mins.set(bucket, min); + + double min = mins.get(bucket); + for (int doc : docBuffer) { + if (values.advanceExact(doc)) { + min = Math.min(min, values.doubleValue()); + } } + mins.set(bucket, min); } }; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/SumAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/SumAggregator.java index ba32592f75ea1..bfdda1a0dcef3 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/SumAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/SumAggregator.java @@ -120,27 +120,34 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBuc final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx); final CompensatedSum kahanSummation = new CompensatedSum(0, 0); return new LeafBucketCollectorBase(sub, values) { + int[] docBuffer = new int[1]; + @Override public void collect(int doc, long bucket) throws IOException { + docBuffer[0] = doc; + collect(docBuffer, bucket); + } + + @Override + public void collect(int[] docBuffer, long bucket) throws IOException { sums = bigArrays.grow(sums, bucket + 1); compensations = bigArrays.grow(compensations, bucket + 1); - if (values.advanceExact(doc)) { - final int valuesCount = values.docValueCount(); - // Compute the sum of double values with Kahan summation algorithm which is more - // accurate than naive summation. - double sum = sums.get(bucket); - double compensation = compensations.get(bucket); - kahanSummation.reset(sum, compensation); - - for (int i = 0; i < valuesCount; i++) { - double value = values.nextValue(); - kahanSummation.add(value); + // Compute the sum of double values with Kahan summation algorithm which is more + // accurate than naive summation. 
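MinAggregator and MaxAggregator above follow the same shape: the single-document collect fills a one-element buffer and delegates to a bulk collect that grows the per-bucket array once, folds the whole buffer into a local running extreme, and writes it back once. A simplified sketch of that pattern is below; NumericValues is a hypothetical stand-in for Lucene's NumericDoubleValues and a plain double[] stands in for BigArrays.

import java.util.Arrays;

// Sketch of the "buffer then fold" pattern; NumericValues is not the real Lucene interface.
final class BatchedMaxCollector {
    interface NumericValues {
        boolean advanceExact(int doc); // position on doc; false if the doc has no value
        double doubleValue();          // value for the current doc
    }

    private final NumericValues values;
    private double[] maxes = new double[0];
    private final int[] singleDoc = new int[1];

    BatchedMaxCollector(NumericValues values) {
        this.values = values;
    }

    // Single-doc entry point reuses the bulk path with a one-element buffer.
    void collect(int doc, int bucket) {
        singleDoc[0] = doc;
        collect(singleDoc, bucket);
    }

    // Bulk entry point: one grow, one read of the running max, one write back.
    void collect(int[] docBuffer, int bucket) {
        if (bucket >= maxes.length) {
            int from = maxes.length;
            maxes = Arrays.copyOf(maxes, bucket + 1);
            Arrays.fill(maxes, from, maxes.length, Double.NEGATIVE_INFINITY);
        }
        double max = maxes[bucket];
        for (int doc : docBuffer) {
            if (values.advanceExact(doc)) {
                max = Math.max(max, values.doubleValue());
            }
        }
        maxes[bucket] = max;
    }

    double max(int bucket) {
        return maxes[bucket];
    }

    public static void main(String[] args) {
        double[] vals = { 3.0, 7.5, 1.2, 9.9 };
        BatchedMaxCollector collector = new BatchedMaxCollector(new NumericValues() {
            int current;

            public boolean advanceExact(int doc) {
                current = doc;
                return true;
            }

            public double doubleValue() {
                return vals[current];
            }
        });
        collector.collect(new int[] { 0, 1, 2, 3 }, 0);
        System.out.println(collector.max(0)); // 9.9
    }
}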
+ double sum = sums.get(bucket); + double compensation = compensations.get(bucket); + kahanSummation.reset(sum, compensation); + for (int doc : docBuffer) { + if (values.advanceExact(doc)) { + for (int i = 0; i < values.docValueCount(); i++) { + double value = values.nextValue(); + kahanSummation.add(value); + } } - - compensations.set(bucket, kahanSummation.delta()); - sums.set(bucket, kahanSummation.value()); } + compensations.set(bucket, kahanSummation.delta()); + sums.set(bucket, kahanSummation.value()); } }; } diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 90f4d20ad07a5..7524cc1e7bd05 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1,3 +1,4 @@ org.opensearch.index.codec.composite.composite912.Composite912Codec -org.opensearch.index.codec.composite.composite103.Composite103Codec +org.opensearch.index.codec.composite.composite104.Composite104Codec +org.opensearch.index.codec.composite.backward_codecs.composite103.Composite103Codec org.opensearch.index.codec.composite.backward_codecs.composite101.Composite101Codec diff --git a/server/src/test/java/org/opensearch/index/codec/CodecTests.java b/server/src/test/java/org/opensearch/index/codec/CodecTests.java index e77ae4518ef3e..54764272b5f15 100644 --- a/server/src/test/java/org/opensearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/opensearch/index/codec/CodecTests.java @@ -34,7 +34,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; @@ -48,7 +48,7 @@ import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; import org.opensearch.index.analysis.IndexAnalyzers; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.engine.EngineConfig; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.similarity.SimilarityService; @@ -71,58 +71,58 @@ public class CodecTests extends OpenSearchTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(false); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene103Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Lucene104Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService(false).codec("default"); - assertStoredFieldsCompressionEquals(Lucene103Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene104Codec.Mode.BEST_SPEED, codec); } public void testDefaultWithCompositeIndex() throws Exception { Codec codec = createCodecService(false, true).codec("default"); - assertStoredFieldsCompressionEquals(Lucene103Codec.Mode.BEST_SPEED, codec); - assert codec instanceof Composite103Codec; + assertStoredFieldsCompressionEquals(Lucene104Codec.Mode.BEST_SPEED, codec); + assert codec instanceof Composite104Codec; } public void 
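The META-INF/services update above is what actually exposes the new composite codec: Lucene resolves codecs by name through Java's ServiceLoader, so Composite104Codec has to be listed (and the older composite codecs kept as backward codecs) for existing segments to remain readable. A quick way to eyeball the registration, assuming lucene-core plus the OpenSearch server jar and its dependencies are on the classpath:

import org.apache.lucene.codecs.Codec;

// Lists every codec name visible via SPI together with its implementing class.
// After the services-file change the new composite codec should show up here;
// exact names are not asserted because they are defined by the codec classes.
public final class ListCodecs {
    public static void main(String[] args) {
        for (String name : Codec.availableCodecs()) {
            System.out.println(name + " -> " + Codec.forName(name).getClass().getName());
        }
    }
}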
testBestCompression() throws Exception { Codec codec = createCodecService(false).codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene103Codec.Mode.BEST_COMPRESSION, codec); + assertStoredFieldsCompressionEquals(Lucene104Codec.Mode.BEST_COMPRESSION, codec); } public void testBestCompressionWithCompositeIndex() throws Exception { Codec codec = createCodecService(false, true).codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene103Codec.Mode.BEST_COMPRESSION, codec); - assert codec instanceof Composite103Codec; + assertStoredFieldsCompressionEquals(Lucene104Codec.Mode.BEST_COMPRESSION, codec); + assert codec instanceof Composite104Codec; } public void testLZ4() throws Exception { Codec codec = createCodecService(false).codec("lz4"); - assertStoredFieldsCompressionEquals(Lucene103Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene104Codec.Mode.BEST_SPEED, codec); assert codec instanceof PerFieldMappingPostingFormatCodec; } public void testLZ4WithCompositeIndex() throws Exception { Codec codec = createCodecService(false, true).codec("lz4"); - assertStoredFieldsCompressionEquals(Lucene103Codec.Mode.BEST_SPEED, codec); - assert codec instanceof Composite103Codec; + assertStoredFieldsCompressionEquals(Lucene104Codec.Mode.BEST_SPEED, codec); + assert codec instanceof Composite104Codec; } public void testZlib() throws Exception { Codec codec = createCodecService(false).codec("zlib"); - assertStoredFieldsCompressionEquals(Lucene103Codec.Mode.BEST_COMPRESSION, codec); + assertStoredFieldsCompressionEquals(Lucene104Codec.Mode.BEST_COMPRESSION, codec); assert codec instanceof PerFieldMappingPostingFormatCodec; } public void testZlibWithCompositeIndex() throws Exception { Codec codec = createCodecService(false, true).codec("zlib"); - assertStoredFieldsCompressionEquals(Lucene103Codec.Mode.BEST_COMPRESSION, codec); - assert codec instanceof Composite103Codec; + assertStoredFieldsCompressionEquals(Lucene104Codec.Mode.BEST_COMPRESSION, codec); + assert codec instanceof Composite104Codec; } public void testResolveDefaultCodecsWithCompositeIndex() throws Exception { CodecService codecService = createCodecService(false, true); - assertThat(codecService.codec("default"), instanceOf(Composite103Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Composite104Codec.class)); } public void testBestCompressionWithCompressionLevel() { @@ -157,12 +157,12 @@ public void testLuceneCodecsWithCompressionLevel() { public void testDefaultMapperServiceNull() throws Exception { Codec codec = createCodecService(true).codec("default"); - assertStoredFieldsCompressionEquals(Lucene103Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene104Codec.Mode.BEST_SPEED, codec); } public void testBestCompressionMapperServiceNull() throws Exception { Codec codec = createCodecService(true).codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene103Codec.Mode.BEST_COMPRESSION, codec); + assertStoredFieldsCompressionEquals(Lucene104Codec.Mode.BEST_COMPRESSION, codec); } public void testExceptionCodecNull() { @@ -174,11 +174,11 @@ public void testExceptionIndexSettingsNull() { } // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsCompressionEquals(Lucene103Codec.Mode expected, Codec actual) throws Exception { + private void assertStoredFieldsCompressionEquals(Lucene104Codec.Mode expected, Codec actual) throws Exception { SegmentReader sr = getSegmentReader(actual); String v 
= sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); assertNotNull(v); - assertEquals(expected, Lucene103Codec.Mode.valueOf(v)); + assertEquals(expected, Lucene104Codec.Mode.valueOf(v)); } private CodecService createCodecService(boolean isMapperServiceNull) throws IOException { diff --git a/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java index 620bdb2f2a01b..81631ad46d7ff 100644 --- a/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java +++ b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java @@ -8,8 +8,8 @@ package org.opensearch.index.codec.composite; -import org.apache.lucene.backward_codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.SegmentInfo; @@ -17,7 +17,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.InfoStream; import org.apache.lucene.util.Version; -import org.opensearch.index.codec.composite.backward_codecs.composite101.Composite101Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.test.OpenSearchTestCase; import org.junit.After; import org.junit.Before; @@ -49,7 +49,7 @@ public void testGetDocValuesConsumerForCompositeCodec() throws IOException { randomInt(), false, false, - new Lucene101Codec(), + new Lucene104Codec(), new HashMap<>(), UUID.randomUUID().toString().substring(0, 16).getBytes(StandardCharsets.UTF_8), new HashMap<>(), @@ -73,7 +73,7 @@ public void testGetDocValuesConsumerForCompositeCodec() throws IOException { ); assertEquals("org.apache.lucene.codecs.lucene90.Lucene90DocValuesConsumer", consumer.getClass().getName()); - assertEquals(CompositeCodecFactory.COMPOSITE_CODEC, Composite101Codec.COMPOSITE_INDEX_CODEC_NAME); + assertEquals(CompositeCodecFactory.COMPOSITE_CODEC, Composite104Codec.COMPOSITE_INDEX_CODEC_NAME); consumer.close(); } diff --git a/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java index 8b9def45129e2..9c3a95b9ee608 100644 --- a/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java +++ b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java @@ -8,9 +8,9 @@ package org.opensearch.index.codec.composite; -import org.apache.lucene.backward_codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.SegmentInfo; @@ -53,7 +53,7 @@ public void testGetDocValuesProducerForCompositeCodec99() throws IOException { randomInt(), false, false, - new Lucene101Codec(), + new Lucene104Codec(), new HashMap<>(), UUID.randomUUID().toString().substring(0, 16).getBytes(StandardCharsets.UTF_8), new HashMap<>(), diff --git a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/AbstractStarTreeDVFormatTests.java 
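assertStoredFieldsCompressionEquals above ties each codec service name to a stored-fields mode by writing a segment and reading the MODE_KEY attribute back. Outside the test framework the same check looks roughly like the sketch below; the index setup is reduced to a single stored field, and the Lucene104Codec(Mode) constructor is assumed to follow the same pattern as the earlier per-version codecs.

import org.apache.lucene.codecs.lucene104.Lucene104Codec;
import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

// Rough sketch: write one document with an explicit codec mode, then read the
// stored-fields MODE_KEY attribute back from the resulting segment.
public final class StoredFieldsModeCheck {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new ByteBuffersDirectory()) {
            IndexWriterConfig cfg = new IndexWriterConfig()
                .setCodec(new Lucene104Codec(Lucene104Codec.Mode.BEST_COMPRESSION)); // assumed ctor
            try (IndexWriter writer = new IndexWriter(dir, cfg)) {
                Document doc = new Document();
                doc.add(new StoredField("body", "some stored text"));
                writer.addDocument(doc);
                writer.commit();
            }
            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                SegmentReader segment = (SegmentReader) reader.leaves().get(0).reader();
                String mode = segment.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY);
                System.out.println("stored fields mode: " + mode); // expected: BEST_COMPRESSION
            }
        }
    }
}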
b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/AbstractStarTreeDVFormatTests.java index c281ad469be16..16d5307e99732 100644 --- a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/AbstractStarTreeDVFormatTests.java +++ b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/AbstractStarTreeDVFormatTests.java @@ -13,7 +13,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.Version; @@ -28,7 +28,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; import org.opensearch.index.MapperTestUtils; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.compositeindex.datacube.startree.StarTreeFieldConfiguration; import org.opensearch.index.compositeindex.datacube.startree.StarTreeIndexSettings; import org.opensearch.index.mapper.MapperService; @@ -75,7 +75,7 @@ protected Codec getCodec() { } catch (IOException e) { throw new RuntimeException(e); } - Codec codec = new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, testLogger); + Codec codec = new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger); return codec; } diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java index d80142d1b8e15..e23e6c59b66fe 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java @@ -8,9 +8,9 @@ package org.opensearch.index.compositeindex.datacube.startree.builder; -import org.apache.lucene.backward_codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; @@ -115,7 +115,7 @@ public static void setup() throws IOException { 5, false, false, - new Lucene101Codec(), + new Lucene104Codec(), new HashMap<>(), UUID.randomUUID().toString().substring(0, 16).getBytes(StandardCharsets.UTF_8), new HashMap<>(), diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java index e735dea580559..391625dd3393b 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java @@ -8,8 +8,8 @@ package org.opensearch.index.compositeindex.datacube.startree.builder; -import org.apache.lucene.backward_codecs.lucene101.Lucene101Codec; import 
org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; @@ -526,7 +526,7 @@ public static SegmentReadState getReadState( numDocs, false, false, - new Lucene101Codec(), + new Lucene104Codec(), new HashMap<>(), writeState.segmentInfo.getId(), new HashMap<>(), @@ -580,7 +580,7 @@ public static SegmentWriteState getWriteState(int numDocs, byte[] id, FieldInfo[ numDocs, false, false, - new Lucene101Codec(), + new Lucene104Codec(), new HashMap<>(), id, new HashMap<>(), diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java index 7796c73e36049..34a7737cfa5f6 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java @@ -8,9 +8,9 @@ package org.opensearch.index.compositeindex.datacube.startree.builder; -import org.apache.lucene.backward_codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; @@ -641,7 +641,7 @@ public void test_build_multipleStarTrees() throws IOException { 7, false, false, - new Lucene101Codec(), + new Lucene104Codec(), new HashMap<>(), UUID.randomUUID().toString().substring(0, 16).getBytes(StandardCharsets.UTF_8), new HashMap<>(), @@ -925,7 +925,7 @@ public void test_build_starTreeDataset() throws IOException { 7, false, false, - new Lucene101Codec(), + new Lucene104Codec(), new HashMap<>(), UUID.randomUUID().toString().substring(0, 16).getBytes(StandardCharsets.UTF_8), new HashMap<>(), diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreesBuilderTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreesBuilderTests.java index f9625445de2bb..767f81586bd69 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreesBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreesBuilderTests.java @@ -8,9 +8,9 @@ package org.opensearch.index.compositeindex.datacube.startree.builder; -import org.apache.lucene.backward_codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.SegmentInfo; @@ -65,7 +65,7 @@ public void setUp() throws Exception { 5, false, false, - new Lucene101Codec(), + new Lucene104Codec(), new HashMap<>(), UUID.randomUUID().toString().substring(0, 16).getBytes(StandardCharsets.UTF_8), new HashMap<>(), diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataTests.java 
b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataTests.java index e6f3f24dd82ed..bb90ebc38435d 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataTests.java @@ -8,7 +8,7 @@ package org.opensearch.index.compositeindex.datacube.startree.fileformats.meta; -import org.apache.lucene.backward_codecs.lucene101.Lucene101Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; @@ -81,7 +81,7 @@ public void setup() throws IOException { 6, false, false, - new Lucene101Codec(), + new Lucene104Codec(), new HashMap<>(), UUID.randomUUID().toString().substring(0, 16).getBytes(StandardCharsets.UTF_8), new HashMap<>(), diff --git a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java index 28e3177d725f1..130486fb10129 100644 --- a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java @@ -31,15 +31,15 @@ package org.opensearch.index.engine; -import org.apache.lucene.backward_codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.suggest.document.Completion101PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion104PostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.opensearch.OpenSearchException; @@ -69,8 +69,8 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - final PostingsFormat postingsFormat = new Completion101PostingsFormat(); - indexWriterConfig.setCodec(new Lucene101Codec() { + final PostingsFormat postingsFormat = new Completion104PostingsFormat(); + indexWriterConfig.setCodec(new Lucene104Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/DateHistogramAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/DateHistogramAggregatorTests.java index 396d9a5f27b50..8665241e4c7c4 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/DateHistogramAggregatorTests.java @@ -13,7 +13,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import 
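CompletionStatsCacheTests above moves the codec and the suggester postings format to the Lucene 10.4 variants together, since completion fields must be written and read with a matching completion postings format. The per-field override it relies on can be sketched as below; the field name and weight are arbitrary.

import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene104.Lucene104Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.suggest.document.Completion104PostingsFormat;
import org.apache.lucene.search.suggest.document.SuggestField;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

// Sketch: route fields through the completion postings format by overriding
// getPostingsFormatForField, mirroring the anonymous codec in the test above.
public final class SuggestIndexSketch {
    public static void main(String[] args) throws Exception {
        PostingsFormat completions = new Completion104PostingsFormat();
        IndexWriterConfig cfg = new IndexWriterConfig().setCodec(new Lucene104Codec() {
            @Override
            public PostingsFormat getPostingsFormatForField(String field) {
                return completions; // real mappings would route only suggest fields here
            }
        });
        try (Directory dir = new ByteBuffersDirectory(); IndexWriter writer = new IndexWriter(dir, cfg)) {
            Document doc = new Document();
            doc.add(new SuggestField("suggest", "nirvana", 1));
            writer.addDocument(doc);
            writer.commit();
        }
    }
}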
org.apache.lucene.document.Document; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -31,7 +31,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import org.opensearch.index.compositeindex.datacube.DateDimension; import org.opensearch.index.compositeindex.datacube.Dimension; @@ -82,7 +82,7 @@ protected Codec getCodec() { } catch (IOException e) { throw new RuntimeException(e); } - return new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, testLogger); + return new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger); } public void testStarTreeDateHistogram() throws IOException { diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/DateRangeQueryTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/DateRangeQueryTests.java index e115e4e9eed7f..3f967b25d4fea 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/DateRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/DateRangeQueryTests.java @@ -13,7 +13,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -32,7 +32,7 @@ import org.opensearch.common.lucene.Lucene; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import org.opensearch.index.compositeindex.datacube.DateDimension; import org.opensearch.index.compositeindex.datacube.Dimension; @@ -87,7 +87,7 @@ protected Codec getCodec() { } catch (IOException e) { throw new RuntimeException(e); } - return new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, testLogger); + return new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger); } public void testStarTreeValidDateRangeQuery() throws IOException { diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/KeywordTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/KeywordTermsAggregatorTests.java index ce567eff034d9..c0469434eb6f7 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/KeywordTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/KeywordTermsAggregatorTests.java @@ -13,7 +13,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import 
org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -33,7 +33,7 @@ import org.opensearch.common.lucene.Lucene; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import org.opensearch.index.compositeindex.datacube.Dimension; import org.opensearch.index.compositeindex.datacube.NumericDimension; @@ -85,7 +85,7 @@ protected Codec getCodec() { } catch (IOException e) { throw new RuntimeException(e); } - return new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, testLogger); + return new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger); } public void testStarTreeKeywordTerms() throws IOException { diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java index 0dc575d4ea928..056cb181c23a3 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java @@ -13,7 +13,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoubleField; import org.apache.lucene.document.Field; @@ -43,7 +43,7 @@ import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import org.opensearch.index.compositeindex.datacube.Dimension; import org.opensearch.index.compositeindex.datacube.Metric; @@ -124,7 +124,7 @@ protected Codec getCodec( } catch (IOException e) { throw new RuntimeException(e); } - return new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, testLogger); + return new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger); } public void testStarTreeDocValues() throws IOException { diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/MultiTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/MultiTermsAggregatorTests.java index 769630974ce48..a587d30c04950 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/MultiTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/MultiTermsAggregatorTests.java @@ -11,7 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; 
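The star-tree aggregator tests above all swap the same getCodec() override from the Composite103Codec/Lucene103Codec pair to the 10.4 equivalents. The shared shape of that override is roughly the sketch below; buildMapperService() is a hypothetical placeholder for the per-test mapping setup, and the constructor arguments simply mirror the test usage.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene104.Lucene104Codec;
import org.opensearch.index.codec.composite.composite104.Composite104Codec;
import org.opensearch.index.mapper.MapperService;

import java.io.IOException;

// Rough outline of the getCodec() override shared by the star-tree tests above.
// buildMapperService() is a hypothetical helper standing in for each test's setup.
abstract class StarTreeTestCodecSupport {
    private static final Logger testLogger = LogManager.getLogger(StarTreeTestCodecSupport.class);

    protected Codec getCodec() {
        final MapperService mapperService;
        try {
            mapperService = buildMapperService(); // hypothetical helper
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger);
    }

    protected abstract MapperService buildMapperService() throws IOException;
}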
+import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -32,7 +32,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import org.opensearch.index.compositeindex.datacube.Dimension; import org.opensearch.index.compositeindex.datacube.NumericDimension; @@ -86,7 +86,7 @@ protected Codec getCodec() { } catch (IOException e) { throw new RuntimeException(e); } - return new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, testLogger); + return new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger); } public void testMultiTermsWithStarTree() throws IOException { diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/NumericTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/NumericTermsAggregatorTests.java index 28919b9539b13..b8c8c34b338d7 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/NumericTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/NumericTermsAggregatorTests.java @@ -13,7 +13,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; @@ -31,7 +31,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import org.opensearch.index.compositeindex.datacube.Dimension; import org.opensearch.index.compositeindex.datacube.NumericDimension; @@ -81,7 +81,7 @@ protected Codec getCodec() { } catch (IOException e) { throw new RuntimeException(e); } - return new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, testLogger); + return new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger); } public void testStarTreeNumericTerms() throws IOException { diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/RangeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/RangeAggregatorTests.java index 30b5a852040b0..e3ef7d62fcde7 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/RangeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/RangeAggregatorTests.java @@ -13,7 +13,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import 
org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; @@ -29,7 +29,7 @@ import org.opensearch.common.lucene.Lucene; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import org.opensearch.index.compositeindex.datacube.Dimension; import org.opensearch.index.compositeindex.datacube.NumericDimension; @@ -75,7 +75,7 @@ protected Codec getCodec() { } catch (IOException e) { throw new RuntimeException(e); } - return new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, testLogger); + return new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger); } public void testRangeAggregation() throws IOException { diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java index 1d8fc3a460a24..b32cd6ebea6ac 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java @@ -11,7 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; @@ -28,7 +28,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import org.opensearch.index.compositeindex.datacube.MetricStat; import org.opensearch.index.compositeindex.datacube.startree.index.StarTreeValues; @@ -85,7 +85,7 @@ protected Codec getCodec(int maxLeafDoc, boolean skipStarNodeCreationForSDVDimen } catch (IOException e) { throw new RuntimeException(e); } - return new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, testLogger); + return new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger); } public void testStarTreeFilterWithNoDocsInSVDField() throws IOException { diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeNestedAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeNestedAggregatorTests.java index f94ac2424ecbe..c72dabc76238e 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeNestedAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeNestedAggregatorTests.java @@ -13,7 +13,7 @@ import 
org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene103.Lucene103Codec; +import org.apache.lucene.codecs.lucene104.Lucene104Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -32,7 +32,7 @@ import org.opensearch.common.lucene.Lucene; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; -import org.opensearch.index.codec.composite.composite103.Composite103Codec; +import org.opensearch.index.codec.composite.composite104.Composite104Codec; import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import org.opensearch.index.compositeindex.datacube.DateDimension; import org.opensearch.index.compositeindex.datacube.Dimension; @@ -98,7 +98,7 @@ protected Codec getCodec() { } catch (IOException e) { throw new RuntimeException(e); } - return new Composite103Codec(Lucene103Codec.Mode.BEST_SPEED, mapperService, testLogger); + return new Composite104Codec(Lucene104Codec.Mode.BEST_SPEED, mapperService, testLogger); } public void testStarTreeNestedAggregations() throws IOException { diff --git a/test/framework/licenses/lucene-codecs-10.3.1.jar.sha1 b/test/framework/licenses/lucene-codecs-10.3.1.jar.sha1 deleted file mode 100644 index e8434856f411d..0000000000000 --- a/test/framework/licenses/lucene-codecs-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2e7a8493a514d66449e1acbce8608de396418722 \ No newline at end of file diff --git a/test/framework/licenses/lucene-codecs-10.4.0-snapshot-a8c1455.jar.sha1 b/test/framework/licenses/lucene-codecs-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..940afb04c4ad2 --- /dev/null +++ b/test/framework/licenses/lucene-codecs-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +9bbe3928fb20ece18edd8a6f025f55b48d060c91 \ No newline at end of file diff --git a/test/framework/licenses/lucene-test-framework-10.3.1.jar.sha1 b/test/framework/licenses/lucene-test-framework-10.3.1.jar.sha1 deleted file mode 100644 index 55da1f242e51f..0000000000000 --- a/test/framework/licenses/lucene-test-framework-10.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc0c9079bf5c7d660989bb37e822cd53e65ae103 \ No newline at end of file diff --git a/test/framework/licenses/lucene-test-framework-10.4.0-snapshot-a8c1455.jar.sha1 b/test/framework/licenses/lucene-test-framework-10.4.0-snapshot-a8c1455.jar.sha1 new file mode 100644 index 0000000000000..4c1c3fbf2db88 --- /dev/null +++ b/test/framework/licenses/lucene-test-framework-10.4.0-snapshot-a8c1455.jar.sha1 @@ -0,0 +1 @@ +1e46f7eaf8bfd6eb41d6fbeb29e03165ad629e8c \ No newline at end of file
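The checksum hunks here swap the per-jar license files over to the 10.4.0 snapshot artifacts; each .sha1 file holds the hex SHA-1 digest of the matching jar. A standalone check of a downloaded artifact could look like the sketch below; the file names are taken from this diff, but the paths are illustrative.

import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;

// Computes the hex SHA-1 of a jar and compares it with the sibling .sha1 file,
// which is the property the license checksum files in this change record.
public final class Sha1Check {
    public static void main(String[] args) throws Exception {
        Path jar = Path.of("lucene-core-10.4.0-snapshot-a8c1455.jar");            // illustrative path
        Path sha = Path.of("lucene-core-10.4.0-snapshot-a8c1455.jar.sha1");       // illustrative path

        byte[] digest = MessageDigest.getInstance("SHA-1").digest(Files.readAllBytes(jar));
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }

        String expected = Files.readString(sha).trim();
        System.out.println(hex.toString().equals(expected) ? "checksum OK" : "checksum MISMATCH");
    }
}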