Commit 9b383d3

Merge remote-tracking branch 'upstream/main' into synthetic_vectors
2 parents bf61ed2 + 82b6e45

149 files changed: +5153 −3089 lines changed


benchmarks/src/main/java/org/elasticsearch/benchmark/vector/OptimizedScalarQuantizerBenchmark.java

Lines changed: 14 additions & 5 deletions
@@ -43,7 +43,8 @@ public class OptimizedScalarQuantizerBenchmark {
 
     float[] vector;
     float[] centroid;
-    byte[] destination;
+    byte[] legacyDestination;
+    int[] destination;
 
     @Param({ "1", "4", "7" })
     byte bits;
@@ -54,7 +55,8 @@ public class OptimizedScalarQuantizerBenchmark {
     public void init() {
        ThreadLocalRandom random = ThreadLocalRandom.current();
        // random byte arrays for binary methods
-        destination = new byte[dims];
+        legacyDestination = new byte[dims];
+        destination = new int[dims];
        vector = new float[dims];
        centroid = new float[dims];
        for (int i = 0; i < dims; ++i) {
@@ -65,13 +67,20 @@ public void init() {
 
     @Benchmark
     public byte[] scalar() {
-        osq.legacyScalarQuantize(vector, destination, bits, centroid);
-        return destination;
+        osq.legacyScalarQuantize(vector, legacyDestination, bits, centroid);
+        return legacyDestination;
+    }
+
+    @Benchmark
+    @Fork(jvmArgsPrepend = { "--add-modules=jdk.incubator.vector" })
+    public byte[] legacyVector() {
+        osq.legacyScalarQuantize(vector, legacyDestination, bits, centroid);
+        return legacyDestination;
     }
 
     @Benchmark
     @Fork(jvmArgsPrepend = { "--add-modules=jdk.incubator.vector" })
-    public byte[] vector() {
+    public int[] vector() {
        osq.scalarQuantize(vector, destination, bits, centroid);
        return destination;
     }
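To compare the renamed benchmarks locally, a minimal JMH entry point along the following lines should be enough. This is a sketch, not part of the commit: it assumes JMH's standard Runner/OptionsBuilder API and that the benchmarks module is on the classpath (the vector() benchmark gets jdk.incubator.vector from the @Fork arguments shown above); the RunOsqBenchmark class name is illustrative only.

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class RunOsqBenchmark {
    public static void main(String[] args) throws RunnerException {
        // Run only the quantization benchmarks touched by this commit:
        // scalar(), legacyVector() and vector().
        Options opts = new OptionsBuilder()
            .include("OptimizedScalarQuantizerBenchmark")
            .forks(1)
            .build();
        new Runner(opts).run();
    }
}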

build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java

Lines changed: 2 additions & 0 deletions
@@ -175,6 +175,8 @@ private void addNameAndDescriptionToPom(Project project, NamedDomainObjectSet<Ma
     private static void configureWithShadowPlugin(Project project, MavenPublication publication) {
         var shadow = project.getExtensions().getByType(ShadowExtension.class);
         shadow.component(publication);
+        publication.artifact(project.getTasks().named("javadocJar"));
+        publication.artifact(project.getTasks().named("sourcesJar"));
     }
 
     private static void addScmInfo(XmlProvider xml, GitInfo gitInfo) {

build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java

Lines changed: 104 additions & 16 deletions
@@ -34,9 +34,11 @@
 import java.io.File;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Stream;
 
 import javax.inject.Inject;
 
+import static java.util.stream.Collectors.joining;
 import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams;
 import static org.elasticsearch.gradle.util.FileUtils.mkdirs;
 import static org.elasticsearch.gradle.util.GradleUtils.maybeConfigure;
@@ -173,6 +175,16 @@ public void execute(Task t) {
                 // we use 'temp' relative to CWD since this is per JVM and tests are forbidden from writing to CWD
                 nonInputProperties.systemProperty("java.io.tmpdir", test.getWorkingDir().toPath().resolve("temp"));
 
+                SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
+                SourceSet mainSourceSet = sourceSets.findByName(SourceSet.MAIN_SOURCE_SET_NAME);
+                SourceSet testSourceSet = sourceSets.findByName(SourceSet.TEST_SOURCE_SET_NAME);
+                if ("test".equals(test.getName()) && mainSourceSet != null && testSourceSet != null) {
+                    FileCollection mainRuntime = mainSourceSet.getRuntimeClasspath();
+                    FileCollection testRuntime = testSourceSet.getRuntimeClasspath();
+                    FileCollection testOnlyFiles = testRuntime.minus(mainRuntime);
+                    test.doFirst(task -> test.environment("es.entitlement.testOnlyPath", testOnlyFiles.getAsPath()));
+                }
+
                 test.systemProperties(getProviderFactory().systemPropertiesPrefixedBy("tests.").get());
                 test.systemProperties(getProviderFactory().systemPropertiesPrefixedBy("es.").get());
 
@@ -205,46 +217,122 @@ public void execute(Task t) {
            }
 
            /*
-             * If this project builds a shadow JAR than any unit tests should test against that artifact instead of
+             * If this project builds a shadow JAR then any unit tests should test against that artifact instead of
             * compiled class output and dependency jars. This better emulates the runtime environment of consumers.
             */
            project.getPluginManager().withPlugin("com.gradleup.shadow", p -> {
                if (test.getName().equals(JavaPlugin.TEST_TASK_NAME)) {
                    // Remove output class files and any other dependencies from the test classpath, since the shadow JAR includes these
-                    SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
-                    FileCollection mainRuntime = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME).getRuntimeClasspath();
                    // Add any "shadow" dependencies. These are dependencies that are *not* bundled into the shadow JAR
                    Configuration shadowConfig = project.getConfigurations().getByName(ShadowBasePlugin.CONFIGURATION_NAME);
                    // Add the shadow JAR artifact itself
                    FileCollection shadowJar = project.files(project.getTasks().named("shadowJar"));
-                    FileCollection testRuntime = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME).getRuntimeClasspath();
+                    FileCollection mainRuntime = mainSourceSet.getRuntimeClasspath();
+                    FileCollection testRuntime = testSourceSet.getRuntimeClasspath();
                    test.setClasspath(testRuntime.minus(mainRuntime).plus(shadowConfig).plus(shadowJar));
                }
            });
        });
-        configureImmutableCollectionsPatch(project);
+        configureJavaBaseModuleOptions(project);
+        configureEntitlements(project);
+    }
+
+    /**
+     * Computes and sets the {@code --patch-module=java.base} and {@code --add-opens=java.base} JVM command line options.
+     */
+    private void configureJavaBaseModuleOptions(Project project) {
+        project.getTasks().withType(Test.class).matching(task -> task.getName().equals("test")).configureEach(test -> {
+            FileCollection patchedImmutableCollections = patchedImmutableCollections(project);
+            if (patchedImmutableCollections != null) {
+                test.getInputs().files(patchedImmutableCollections);
+                test.systemProperty("tests.hackImmutableCollections", "true");
+            }
+
+            FileCollection entitlementBridge = entitlementBridge(project);
+            if (entitlementBridge != null) {
+                test.getInputs().files(entitlementBridge);
+            }
+
+            test.getJvmArgumentProviders().add(() -> {
+                String javaBasePatch = Stream.concat(
+                    singleFilePath(patchedImmutableCollections).map(str -> str + "/java.base"),
+                    singleFilePath(entitlementBridge)
+                ).collect(joining(File.pathSeparator));
+
+                return javaBasePatch.isEmpty()
+                    ? List.of()
+                    : List.of("--patch-module=java.base=" + javaBasePatch, "--add-opens=java.base/java.util=ALL-UNNAMED");
+            });
+        });
     }
 
-    private void configureImmutableCollectionsPatch(Project project) {
+    private Stream<String> singleFilePath(FileCollection collection) {
+        return Stream.ofNullable(collection).filter(fc -> fc.isEmpty() == false).map(FileCollection::getSingleFile).map(File::toString);
+    }
+
+    private static FileCollection patchedImmutableCollections(Project project) {
        String patchProject = ":test:immutable-collections-patch";
        if (project.findProject(patchProject) == null) {
-            return; // build tests may not have this project, just skip
+            return null; // build tests may not have this project, just skip
        }
        String configurationName = "immutableCollectionsPatch";
        FileCollection patchedFileCollection = project.getConfigurations()
            .create(configurationName, config -> config.setCanBeConsumed(false));
        var deps = project.getDependencies();
        deps.add(configurationName, deps.project(Map.of("path", patchProject, "configuration", "patch")));
-        project.getTasks().withType(Test.class).matching(task -> task.getName().equals("test")).configureEach(test -> {
-            test.getInputs().files(patchedFileCollection);
-            test.systemProperty("tests.hackImmutableCollections", "true");
-            test.getJvmArgumentProviders()
-                .add(
-                    () -> List.of(
-                        "--patch-module=java.base=" + patchedFileCollection.getSingleFile() + "/java.base",
-                        "--add-opens=java.base/java.util=ALL-UNNAMED"
-                    )
+        return patchedFileCollection;
+    }
+
+    private static FileCollection entitlementBridge(Project project) {
+        return project.getConfigurations().findByName("entitlementBridge");
+    }
+
+    /**
+     * Sets the required JVM options and system properties to enable entitlement enforcement on tests.
+     * <p>
+     * One command line option is set in {@link #configureJavaBaseModuleOptions} out of necessity,
+     * since the command line can have only one {@code --patch-module} option for a given module.
+     */
+    private static void configureEntitlements(Project project) {
+        Configuration agentConfig = project.getConfigurations().create("entitlementAgent");
+        Project agent = project.findProject(":libs:entitlement:agent");
+        if (agent != null) {
+            agentConfig.defaultDependencies(
+                deps -> { deps.add(project.getDependencies().project(Map.of("path", ":libs:entitlement:agent"))); }
+            );
+        }
+        FileCollection agentFiles = agentConfig;
+
+        Configuration bridgeConfig = project.getConfigurations().create("entitlementBridge");
+        Project bridge = project.findProject(":libs:entitlement:bridge");
+        if (bridge != null) {
+            bridgeConfig.defaultDependencies(
+                deps -> { deps.add(project.getDependencies().project(Map.of("path", ":libs:entitlement:bridge"))); }
+            );
+        }
+        FileCollection bridgeFiles = bridgeConfig;
+
+        project.getTasks().withType(Test.class).configureEach(test -> {
+            // See also SystemJvmOptions.maybeAttachEntitlementAgent.
+
+            // Agent
+            if (agentFiles.isEmpty() == false) {
+                test.getInputs().files(agentFiles);
+                test.systemProperty("es.entitlement.agentJar", agentFiles.getAsPath());
+                test.systemProperty("jdk.attach.allowAttachSelf", true);
+            }
+
+            // Bridge
+            if (bridgeFiles.isEmpty() == false) {
+                String modulesContainingEntitlementInstrumentation = "java.logging,java.net.http,java.naming,jdk.net";
+                test.getInputs().files(bridgeFiles);
+                // Tests may not be modular, but the JDK still is
+                test.jvmArgs(
                    "--add-exports=java.base/org.elasticsearch.entitlement.bridge=ALL-UNNAMED,"
                        + modulesContainingEntitlementInstrumentation
                );
+            }
        });
    }
+
 }
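The plugin change above only hands information to the test JVM: an es.entitlement.testOnlyPath environment variable plus es.entitlement.agentJar and jdk.attach.allowAttachSelf system properties. How the entitlement runtime consumes those values is outside this diff; as a rough illustration of the receiving side (the helper class and method names are hypothetical, not from the commit), the values could be read like this:

import java.io.File;
import java.util.Arrays;
import java.util.List;

// Hypothetical helper; not part of this commit.
final class EntitlementTestSettings {

    // Set via test.environment("es.entitlement.testOnlyPath", ...) in ElasticsearchTestBasePlugin.
    static List<String> testOnlyPaths() {
        String raw = System.getenv("es.entitlement.testOnlyPath");
        return raw == null ? List.of() : Arrays.asList(raw.split(File.pathSeparator));
    }

    // Set via test.systemProperty("es.entitlement.agentJar", ...) in configureEntitlements().
    static String agentJar() {
        return System.getProperty("es.entitlement.agentJar");
    }
}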

build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java

Lines changed: 1 addition & 1 deletion
@@ -188,7 +188,7 @@ private ChangelogEntry makeHighlightsEntry(int pr, boolean notable) {
     }
 
     private String getResource(String name) throws Exception {
-        return Files.readString(Paths.get(Objects.requireNonNull(this.getClass().getResource(name)).toURI()), StandardCharsets.UTF_8);
+        return Files.readString(Paths.get(Objects.requireNonNull(this.getClass().getResource(name)).toURI()), StandardCharsets.UTF_8).replace("\r", "");
     }
 
     private void writeResource(String name, String contents) throws Exception {

build-tools/src/main/java/org/elasticsearch/gradle/test/TestBuildInfoPlugin.java

Lines changed: 9 additions & 0 deletions
@@ -18,8 +18,11 @@
 import org.gradle.api.provider.ProviderFactory;
 import org.gradle.api.tasks.SourceSet;
 import org.gradle.api.tasks.SourceSetContainer;
+import org.gradle.api.tasks.testing.Test;
 import org.gradle.language.jvm.tasks.ProcessResources;
 
+import java.util.List;
+
 import javax.inject.Inject;
 
 /**
@@ -53,5 +56,11 @@ public void apply(Project project) {
         project.getTasks().withType(ProcessResources.class).named("processResources").configure(task -> {
             task.into("META-INF", copy -> copy.from(testBuildInfoTask));
         });
+
+        if (project.getRootProject().getName().equals("elasticsearch")) {
+            project.getTasks().withType(Test.class).matching(test -> List.of("test").contains(test.getName())).configureEach(test -> {
+                test.systemProperty("es.entitlement.enableForTests", "true");
+            });
+        }
     }
 }
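The es.entitlement.enableForTests flag added here is a plain JVM system property, so test infrastructure that needs to branch on it can use the standard Boolean.getBoolean lookup. A one-line sketch (the method name is illustrative, not from the commit):

// True only when TestBuildInfoPlugin set -Des.entitlement.enableForTests=true on the test task.
static boolean entitlementsEnabledForTests() {
    return Boolean.getBoolean("es.entitlement.enableForTests");
}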

docs/changelog/127797.yaml

Lines changed: 0 additions & 6 deletions
This file was deleted.

docs/changelog/129003.yaml

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+pr: 129003
+summary: Allow timeout during trained model download process
+area: Machine Learning
+type: bug
+issues: []

docs/changelog/129725.yaml

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+pr: 129725
+summary: Throw a 400 when sorting for all types of range fields
+area: Search
+type: bug
+issues: []

docs/reference/elasticsearch/configuration-reference/data-stream-lifecycle-settings.md

Lines changed: 8 additions & 6 deletions
@@ -3,8 +3,7 @@ navigation_title: "Data stream lifecycle settings"
 mapped_pages:
   - https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-lifecycle-settings.html
 applies_to:
-  deployment:
-    self:
+  stack: all
 ---
 
 # Data stream lifecycle settings in {{es}} [data-stream-lifecycle-settings]
@@ -58,21 +57,24 @@ $$$data-streams-lifecycle-signalling-error-retry-interval$$$
 
 ## Index level settings [_index_level_settings]
 
+:::{include} ../index-settings/_snippets/serverless-availability.md
+:::
+
 The following index-level settings are typically configured on the backing indices of a data stream.
 
 $$$index-lifecycle-prefer-ilm$$$
 
 `index.lifecycle.prefer_ilm`
-: ([Dynamic](docs-content://deploy-manage/stack-settings.md#dynamic-cluster-setting), boolean) This setting determines which feature is managing the backing index of a data stream if, and only if, the backing index has an [{{ilm}}](docs-content://manage-data/lifecycle/index-lifecycle-management.md) ({{ilm-init}}) policy and the data stream has also a built-in lifecycle. When `true` this index is managed by {{ilm-init}}, when `false` the backing index is managed by the data stream lifecycle. Defaults to `true`.
+: ([Dynamic](../index-settings/index.md#index-modules-settings-description), boolean) This setting determines which feature is managing the backing index of a data stream if, and only if, the backing index has an [{{ilm}}](docs-content://manage-data/lifecycle/index-lifecycle-management.md) ({{ilm-init}}) policy and the data stream has also a built-in lifecycle. When `true` this index is managed by {{ilm-init}}, when `false` the backing index is managed by the data stream lifecycle. Defaults to `true`.
 
 $$$index-data-stream-lifecycle-origination-date$$$
 
-`index.lifecycle.origination_date`
-: ([Dynamic](docs-content://deploy-manage/stack-settings.md#dynamic-cluster-setting), long) If specified, this is the timestamp used to calculate the backing index generation age after this backing index has been [rolled over](docs-content://manage-data/lifecycle/index-lifecycle-management/rollover.md). The generation age is used to determine data retention, consequently, you can use this setting if you create a backing index that contains older data and want to ensure that the retention period or other parts of the lifecycle will be applied based on the data’s original timestamp and not the timestamp they got indexed. Specified as a Unix epoch value in milliseconds.
+`index.lifecycle.origination_date` {applies_to}`serverless: all`
+: ([Dynamic](../index-settings/index.md#index-modules-settings-description), long) If specified, this is the timestamp used to calculate the backing index generation age after this backing index has been [rolled over](docs-content://manage-data/lifecycle/index-lifecycle-management/rollover.md). The generation age is used to determine data retention, consequently, you can use this setting if you create a backing index that contains older data and want to ensure that the retention period or other parts of the lifecycle will be applied based on the data’s original timestamp and not the timestamp they got indexed. Specified as a Unix epoch value in milliseconds.
 
 ## Reindex settings [reindex-data-stream-settings]
 
-You can use the following settings to control the behavior of the reindex data stream API:
+You can use the following cluster-level settings to control the behavior of the reindex data stream API:
 
 `migrate.max_concurrent_indices_reindexed_per_data_stream` ([Dynamic](docs-content://deploy-manage/stack-settings.md#dynamic-cluster-setting)) The number of backing indices within a given data stream which will be reindexed concurrently. Defaults to `1`.
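Since index.lifecycle.origination_date expects a Unix epoch value in milliseconds, the value is typically derived from the data's original timestamp. A small java.time sketch (the timestamp is an arbitrary example, not from the commit):

import java.time.Instant;

public class OriginationDateExample {
    public static void main(String[] args) {
        // e.g. data originally written on 2023-06-01T00:00:00Z
        long originationDate = Instant.parse("2023-06-01T00:00:00Z").toEpochMilli();
        System.out.println(originationDate); // 1685577600000 -> value for index.lifecycle.origination_date
    }
}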
