Skip to content

Commit 7176455

Browse files
committed
Merge remote-tracking branch 'origin/master' into fetch/values-lookup
2 parents d850a98 + 1e61059 commit 7176455

File tree

554 files changed

+8554
-3810
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

554 files changed

+8554
-3810
lines changed

.ci/bwcVersions

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,5 +30,7 @@ BWC_VERSION:
3030
- "7.10.2"
3131
- "7.11.0"
3232
- "7.11.1"
33+
- "7.11.2"
3334
- "7.12.0"
35+
- "7.13.0"
3436
- "8.0.0"
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
# This file is used with all of the non-matrix tests in Jenkins.
2+
3+
# This .properties file defines the versions of Java with which to
4+
# build and test Elasticsearch for this branch. Valid Java versions
5+
# are 'java' or 'openjdk' followed by the major release number.
6+
7+
ES_BUILD_JAVA=adoptopenjdk15
8+
ES_RUNTIME_JAVA=adoptopenjdk11
9+
GRADLE_TASK=build

.ci/os.sh

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@ else
4848
fi
4949

5050
sudo bash -c 'cat > /etc/sudoers.d/elasticsearch_vars' << SUDOERS_VARS
51+
Defaults env_keep += "ES_JAVA_HOME"
5152
Defaults env_keep += "JAVA_HOME"
5253
Defaults env_keep += "SYSTEM_JAVA_HOME"
5354
SUDOERS_VARS
@@ -63,6 +64,7 @@ sudo mkdir -p /elasticsearch/qa/ && sudo chown jenkins /elasticsearch/qa/ && ln
6364
sudo -E env \
6465
PATH=$BUILD_JAVA_HOME/bin:`sudo bash -c 'echo -n $PATH'` \
6566
RUNTIME_JAVA_HOME=`readlink -f -n $RUNTIME_JAVA_HOME` \
67+
--unset=ES_JAVA_HOME \
6668
--unset=JAVA_HOME \
6769
SYSTEM_JAVA_HOME=`readlink -f -n $RUNTIME_JAVA_HOME` \
6870
./gradlew -g $HOME/.gradle --scan --parallel --continue $@

Vagrantfile

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -461,6 +461,7 @@ def sh_install_deps(config,
461461
ensure expect
462462
463463
cat \<\<SUDOERS_VARS > /etc/sudoers.d/elasticsearch_vars
464+
Defaults env_keep += "ES_JAVA_HOME"
464465
Defaults env_keep += "JAVA_HOME"
465466
Defaults env_keep += "SYSTEM_JAVA_HOME"
466467
SUDOERS_VARS

benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/bucket/terms/LongKeyedBucketOrdsBenchmark.java

Lines changed: 72 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -41,14 +41,18 @@ public class LongKeyedBucketOrdsBenchmark {
4141
/**
4242
* The number of distinct values to add to the buckets.
4343
*/
44-
private static final long DISTINCT_VALUES = 10;
44+
private static final long DISTINCT_VALUES = 210;
4545
/**
4646
* The number of buckets to create in the {@link #multiBucket} case.
4747
* <p>
48-
* If this is not relatively prime to {@link #DISTINCT_VALUES} then the
49-
* values won't be scattered evenly across the buckets.
48+
* If this is not relatively prime to {@link #DISTINCT_VALUES_IN_BUCKETS}
49+
* then the values won't be scattered evenly across the buckets.
5050
*/
5151
private static final long DISTINCT_BUCKETS = 21;
52+
/**
53+
* Number of distinct values to add to values within buckets.
54+
*/
55+
private static final long DISTINCT_VALUES_IN_BUCKETS = 10;
5256

5357
private final PageCacheRecycler recycler = new PageCacheRecycler(Settings.EMPTY);
5458
private final BigArrays bigArrays = new BigArrays(recycler, null, "REQUEST");
@@ -63,6 +67,7 @@ public class LongKeyedBucketOrdsBenchmark {
6367
public void forceLoadClasses(Blackhole bh) {
6468
bh.consume(LongKeyedBucketOrds.FromSingle.class);
6569
bh.consume(LongKeyedBucketOrds.FromMany.class);
70+
bh.consume(LongKeyedBucketOrds.FromManySmall.class);
6671
}
6772

6873
/**
@@ -75,6 +80,9 @@ public void singleBucketIntoSingleImmutableMonmorphicInvocation(Blackhole bh) {
7580
for (long i = 0; i < LIMIT; i++) {
7681
ords.add(0, i % DISTINCT_VALUES);
7782
}
83+
if (ords.size() != DISTINCT_VALUES) {
84+
throw new IllegalArgumentException("Expected [" + DISTINCT_VALUES + "] but found [" + ords.size() + "]");
85+
}
7886
bh.consume(ords);
7987
}
8088
}
@@ -83,11 +91,14 @@ public void singleBucketIntoSingleImmutableMonmorphicInvocation(Blackhole bh) {
8391
* Emulates the way that most aggregations use {@link LongKeyedBucketOrds}.
8492
*/
8593
@Benchmark
86-
public void singleBucketIntoSingleImmutableBimorphicInvocation(Blackhole bh) {
94+
public void singleBucketIntoSingleImmutableMegamorphicInvocation(Blackhole bh) {
8795
try (LongKeyedBucketOrds ords = LongKeyedBucketOrds.build(bigArrays, CardinalityUpperBound.ONE)) {
8896
for (long i = 0; i < LIMIT; i++) {
8997
ords.add(0, i % DISTINCT_VALUES);
9098
}
99+
if (ords.size() != DISTINCT_VALUES) {
100+
throw new IllegalArgumentException("Expected [" + DISTINCT_VALUES + "] but found [" + ords.size() + "]");
101+
}
91102
bh.consume(ords);
92103
}
93104
}
@@ -106,6 +117,10 @@ public void singleBucketIntoSingleMutableMonmorphicInvocation(Blackhole bh) {
106117
}
107118
ords.add(0, i % DISTINCT_VALUES);
108119
}
120+
if (ords.size() != DISTINCT_VALUES) {
121+
ords.close();
122+
throw new IllegalArgumentException("Expected [" + DISTINCT_VALUES + "] but found [" + ords.size() + "]");
123+
}
109124
bh.consume(ords);
110125
ords.close();
111126
}
@@ -116,7 +131,7 @@ public void singleBucketIntoSingleMutableMonmorphicInvocation(Blackhole bh) {
116131
* {@link #singleBucketIntoSingleMutableMonmorphicInvocation monomorphic invocation}.
117132
*/
118133
@Benchmark
119-
public void singleBucketIntoSingleMutableBimorphicInvocation(Blackhole bh) {
134+
public void singleBucketIntoSingleMutableMegamorphicInvocation(Blackhole bh) {
120135
LongKeyedBucketOrds ords = LongKeyedBucketOrds.build(bigArrays, CardinalityUpperBound.ONE);
121136
for (long i = 0; i < LIMIT; i++) {
122137
if (i % 100_000 == 0) {
@@ -125,7 +140,9 @@ public void singleBucketIntoSingleMutableBimorphicInvocation(Blackhole bh) {
125140
ords = LongKeyedBucketOrds.build(bigArrays, CardinalityUpperBound.ONE);
126141
}
127142
ords.add(0, i % DISTINCT_VALUES);
128-
143+
}
144+
if (ords.size() != DISTINCT_VALUES) {
145+
throw new IllegalArgumentException("Expected [" + DISTINCT_VALUES + "] but found [" + ords.size() + "]");
129146
}
130147
bh.consume(ords);
131148
ords.close();
@@ -134,28 +151,68 @@ public void singleBucketIntoSingleMutableBimorphicInvocation(Blackhole bh) {
134151
/**
135152
* Emulates an aggregation that collects from a single bucket "by accident".
136153
* This can happen if an aggregation is under, say, a {@code terms}
137-
* aggregation and there is only a single value for that term in the index.
154+
* aggregation and there is only a single value for that term in the index
155+
* but we can't tell that up front.
138156
*/
139157
@Benchmark
140158
public void singleBucketIntoMulti(Blackhole bh) {
141159
try (LongKeyedBucketOrds ords = LongKeyedBucketOrds.build(bigArrays, CardinalityUpperBound.MANY)) {
142-
for (long i = 0; i < LIMIT; i++) {
143-
ords.add(0, i % DISTINCT_VALUES);
144-
}
160+
singleBucketIntoMultiSmall(ords);
145161
bh.consume(ords);
146162
}
147163
}
148164

165+
/**
166+
* Emulates an aggregation that collects from a single bucket "by accident"
167+
* and gets a "small" bucket ords. This can happen to a {@code terms} inside
168+
* of another {@code terms} when the "inner" terms only even has a single
169+
* bucket.
170+
*/
171+
@Benchmark
172+
public void singleBucketIntoMultiSmall(Blackhole bh) {
173+
try (LongKeyedBucketOrds ords = new LongKeyedBucketOrds.FromManySmall(bigArrays, 60)) {
174+
singleBucketIntoMultiSmall(ords);
175+
bh.consume(ords);
176+
}
177+
}
178+
179+
private void singleBucketIntoMultiSmall(LongKeyedBucketOrds ords) {
180+
for (long i = 0; i < LIMIT; i++) {
181+
ords.add(0, i % DISTINCT_VALUES);
182+
}
183+
if (ords.size() != DISTINCT_VALUES) {
184+
throw new IllegalArgumentException("Expected [" + DISTINCT_VALUES + "] but found [" + ords.size() + "]");
185+
}
186+
}
187+
188+
/**
189+
* Emulates an aggregation that collects from many buckets with a known
190+
* bounds on the values.
191+
*/
192+
@Benchmark
193+
public void multiBucketManySmall(Blackhole bh) {
194+
try (LongKeyedBucketOrds ords = new LongKeyedBucketOrds.FromManySmall(bigArrays, 5)) {
195+
multiBucket(bh, ords);
196+
}
197+
}
198+
149199
/**
150200
* Emulates an aggregation that collects from many buckets.
151201
*/
152202
@Benchmark
153-
public void multiBucket(Blackhole bh) {
203+
public void multiBucketMany(Blackhole bh) {
154204
try (LongKeyedBucketOrds ords = LongKeyedBucketOrds.build(bigArrays, CardinalityUpperBound.MANY)) {
155-
for (long i = 0; i < LIMIT; i++) {
156-
ords.add(i % DISTINCT_BUCKETS, i % DISTINCT_VALUES);
157-
}
158-
bh.consume(ords);
205+
multiBucket(bh, ords);
159206
}
160207
}
208+
209+
private void multiBucket(Blackhole bh, LongKeyedBucketOrds ords) {
210+
for (long i = 0; i < LIMIT; i++) {
211+
ords.add(i % DISTINCT_BUCKETS, i % DISTINCT_VALUES_IN_BUCKETS);
212+
}
213+
if (ords.size() != DISTINCT_VALUES) {
214+
throw new IllegalArgumentException("Expected [" + DISTINCT_VALUES + "] but found [" + ords.size() + "]");
215+
}
216+
bh.consume(ords);
217+
}
161218
}

build.gradle

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -115,10 +115,10 @@ subprojects {
115115
* }
116116
*
117117
* */
118-
ext.testArtifact = { p ->
118+
ext.testArtifact = { p, String name = "test" ->
119119
def projectDependency = p.dependencies.create(p)
120120
projectDependency.capabilities {
121-
requireCapabilities("org.elasticsearch.gradle:${projectDependency.name}-test-artifacts")
121+
requireCapabilities("org.elasticsearch.gradle:${projectDependency.name}-${name}-artifacts")
122122
};
123123
}
124124

buildSrc/build.gradle

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,6 @@ dependencies {
8888
api 'org.apache.commons:commons-compress:1.19'
8989
api 'org.apache.ant:ant:1.10.8'
9090
api 'com.netflix.nebula:gradle-extra-configurations-plugin:5.0.1'
91-
api 'com.netflix.nebula:nebula-publishing-plugin:17.3.2'
9291
api 'com.netflix.nebula:gradle-info-plugin:9.2.0'
9392
api 'org.apache.rat:apache-rat:0.11'
9493
api "org.elasticsearch:jna:5.5.0"
@@ -245,7 +244,7 @@ if (project != rootProject) {
245244
maxParallelForks = providers.systemProperty('tests.jvms').forUseAtConfigurationTime().getOrElse(org.elasticsearch.gradle.info.BuildParams.defaultParallel.toString()) as Integer
246245
}
247246

248-
publishing.publications.named("nebula").configure {
247+
publishing.publications.named("elastic").configure {
249248
suppressPomMetadataWarningsFor("testFixturesApiElements")
250249
suppressPomMetadataWarningsFor("testFixturesRuntimeElements")
251250
}

buildSrc/src/integTest/groovy/org/elasticsearch/gradle/PublishPluginFuncTest.groovy

Lines changed: 29 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ import org.xmlunit.builder.Input
1515

1616
class PublishPluginFuncTest extends AbstractGradleFuncTest {
1717

18-
def "published pom takes es project description into account"() {
18+
def "artifacts and tweaked pom is published"() {
1919
given:
2020
buildFile << """
2121
plugins {
@@ -33,6 +33,9 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
3333

3434
then:
3535
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
36+
file("build/distributions/hello-world-1.0.jar").exists()
37+
file("build/distributions/hello-world-1.0-javadoc.jar").exists()
38+
file("build/distributions/hello-world-1.0-sources.jar").exists()
3639
file("build/distributions/hello-world-1.0.pom").exists()
3740
assertXmlEquals(file("build/distributions/hello-world-1.0.pom").text, """
3841
<project xmlns="http://maven.apache.org/POM/4.0.0"
@@ -48,7 +51,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
4851
)
4952
}
5053

51-
def "generates pom for shadowed elasticsearch plugin"() {
54+
def "generates artifacts for shadowed elasticsearch plugin"() {
5255
given:
5356
file('license.txt') << "License file"
5457
file('notice.txt') << "Notice file"
@@ -64,6 +67,14 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
6467
classname 'org.acme.HelloWorldPlugin'
6568
description = "custom project description"
6669
}
70+
71+
publishing {
72+
repositories {
73+
maven {
74+
url = "\$buildDir/repo"
75+
}
76+
}
77+
}
6778
6879
// requires elasticsearch artifact available
6980
tasks.named('bundlePlugin').configure { enabled = false }
@@ -74,10 +85,14 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
7485
"""
7586

7687
when:
77-
def result = gradleRunner('generatePom').build()
88+
def result = gradleRunner('assemble', '--stacktrace').build()
7889

7990
then:
8091
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
92+
file("build/distributions/hello-world-plugin-1.0-original.jar").exists()
93+
file("build/distributions/hello-world-plugin-1.0.jar").exists()
94+
file("build/distributions/hello-world-plugin-1.0-javadoc.jar").exists()
95+
file("build/distributions/hello-world-plugin-1.0-sources.jar").exists()
8196
file("build/distributions/hello-world-plugin-1.0.pom").exists()
8297
assertXmlEquals(file("build/distributions/hello-world-plugin-1.0.pom").text, """
8398
<project xmlns="http://maven.apache.org/POM/4.0.0"
@@ -180,7 +195,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
180195
"""
181196

182197
when:
183-
def result = gradleRunner('generatePom', 'validateNebulaPom').build()
198+
def result = gradleRunner('generatePom', 'validatElasticPom').build()
184199

185200
then:
186201
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
@@ -226,6 +241,16 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
226241
.build()
227242
diff.differences.each { difference ->
228243
println difference
244+
}
245+
if(diff.differences.size() > 0) {
246+
println """ given:
247+
$toTest
248+
"""
249+
println """ expected:
250+
$expected
251+
"""
252+
253+
229254
}
230255
assert diff.hasDifferences() == false
231256
true

buildSrc/src/integTest/groovy/org/elasticsearch/gradle/RestResourcesPluginFuncTest.groovy

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ class RestResourcesPluginFuncTest extends AbstractRestResourcesFuncTest {
4949
then:
5050
result.task(':copyRestApiSpecsTask').outcome == TaskOutcome.SUCCESS
5151
result.task(':copyYamlTestsTask').outcome == TaskOutcome.NO_SOURCE
52-
file("/build/resources/test/rest-api-spec/api/" + api).exists()
52+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + api).exists()
5353
}
5454

5555
def "restResources copies all core API (but not x-pack) by default for projects with copied tests"() {
@@ -81,11 +81,11 @@ class RestResourcesPluginFuncTest extends AbstractRestResourcesFuncTest {
8181
then:
8282
result.task(':copyRestApiSpecsTask').outcome == TaskOutcome.SUCCESS
8383
result.task(':copyYamlTestsTask').outcome == TaskOutcome.SUCCESS
84-
file("/build/resources/test/rest-api-spec/api/" + apiCore1).exists()
85-
file("/build/resources/test/rest-api-spec/api/" + apiCore2).exists()
86-
file("/build/resources/test/rest-api-spec/api/" + apiXpack).exists() == false //x-pack specs must be explicitly configured
87-
file("/build/resources/test/rest-api-spec/test/" + coreTest).exists()
88-
file("/build/resources/test/rest-api-spec/test/" + xpackTest).exists()
84+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiCore1).exists()
85+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiCore2).exists()
86+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiXpack).exists() == false //x-pack specs must be explicitly configured
87+
file("/build/restResources/yamlTests/rest-api-spec/test/" + coreTest).exists()
88+
file("/build/restResources/yamlTests/rest-api-spec/test/" + xpackTest).exists()
8989
}
9090
9191
def "restResources copies API by configuration"() {
@@ -115,10 +115,10 @@ class RestResourcesPluginFuncTest extends AbstractRestResourcesFuncTest {
115115
then:
116116
result.task(':copyRestApiSpecsTask').outcome == TaskOutcome.SUCCESS
117117
result.task(':copyYamlTestsTask').outcome == TaskOutcome.NO_SOURCE
118-
file("/build/resources/test/rest-api-spec/api/" + apiFoo).exists()
119-
file("/build/resources/test/rest-api-spec/api/" + apiXpackFoo).exists()
120-
file("/build/resources/test/rest-api-spec/api/" + apiBar).exists() ==false
121-
file("/build/resources/test/rest-api-spec/api/" + apiXpackBar).exists() == false
118+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiFoo).exists()
119+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiXpackFoo).exists()
120+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiBar).exists() ==false
121+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiXpackBar).exists() == false
122122
}
123123
124124
def "restResources copies Tests and API by configuration"() {
@@ -154,11 +154,11 @@ class RestResourcesPluginFuncTest extends AbstractRestResourcesFuncTest {
154154
then:
155155
result.task(':copyRestApiSpecsTask').outcome == TaskOutcome.SUCCESS
156156
result.task(':copyYamlTestsTask').outcome == TaskOutcome.SUCCESS
157-
file("/build/resources/test/rest-api-spec/api/" + apiCore1).exists()
158-
file("/build/resources/test/rest-api-spec/api/" + apiCore2).exists()
159-
file("/build/resources/test/rest-api-spec/api/" + apiXpack).exists()
160-
file("/build/resources/test/rest-api-spec/test/" + coreTest).exists()
161-
file("/build/resources/test/rest-api-spec/test/" + xpackTest).exists()
157+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiCore1).exists()
158+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiCore2).exists()
159+
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiXpack).exists()
160+
file("/build/restResources/yamlTests/rest-api-spec/test/" + coreTest).exists()
161+
file("/build/restResources/yamlTests/rest-api-spec/test/" + xpackTest).exists()
162162
163163
when:
164164
result = gradleRunner("copyRestApiSpecsTask").build()

0 commit comments

Comments
 (0)