@@ -18,7 +18,6 @@
 import org.apache.http.impl.client.BasicCredentialsProvider;
 import org.apache.logging.log4j.core.config.plugins.util.PluginManager;
 import org.apache.lucene.util.IOConsumer;
-import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
@@ -32,6 +31,7 @@
 import org.elasticsearch.inference.TaskType;
 import org.elasticsearch.logging.LogManager;
 import org.elasticsearch.logging.Logger;
+import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.xcontent.XContentType;
 
@@ -45,9 +45,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
+import java.util.concurrent.Semaphore;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
@@ -404,56 +402,35 @@ private static void loadDataSetIntoEs(
         IndexCreator indexCreator
     ) throws IOException {
         Logger logger = LogManager.getLogger(CsvTestsDataLoader.class);
-        Set<TestDataset> datasets = availableDatasetsForEs(supportsIndexModeLookup, supportsSourceFieldMapping, inferenceEnabled);
-        ExecutorService executor = Executors.newFixedThreadPool(PARALLEL_THREADS);
-        try {
-            executeInParallel(
-                executor,
-                datasets,
-                dataset -> createIndex(client, dataset, indexCreator),
-                "Failed to create indices in parallel"
-            );
+        List<TestDataset> datasets = availableDatasetsForEs(supportsIndexModeLookup, supportsSourceFieldMapping, inferenceEnabled).stream()
+            .toList();
 
-            executeInParallel(executor, datasets, dataset -> loadData(client, dataset, logger), "Failed to load data in parallel");
+        executeInParallel(datasets, dataset -> createIndex(client, dataset, indexCreator), "Failed to create indices in parallel");
 
-            forceMerge(client, datasets.stream().map(d -> d.indexName).collect(Collectors.toSet()), logger);
+        executeInParallel(datasets, dataset -> loadData(client, dataset, logger), "Failed to load data in parallel");
 
-            executeInParallel(
-                executor,
-                ENRICH_POLICIES,
-                policy -> loadEnrichPolicy(client, policy.policyName, policy.policyFileName, logger),
-                "Failed to load enrich policies in parallel"
-            );
-        } finally {
-            executor.shutdown();
-        }
-    }
+        forceMerge(client, datasets.stream().map(d -> d.indexName).collect(Collectors.toSet()), logger);
 
-    private static <T> void executeInParallel(ExecutorService executor, Iterable<T> items, IOConsumer<T> consumer, String errorMessage)
-        throws IOException {
-        List<Future<?>> futures = new ArrayList<>();
-        for (T item : items) {
-            futures.add(executor.submit(() -> {
-                try {
-                    consumer.accept(item);
-                } catch (IOException e) {
-                    throw new RuntimeException(e);
-                }
-            }));
-        }
+        executeInParallel(
+            ENRICH_POLICIES,
+            policy -> loadEnrichPolicy(client, policy.policyName, policy.policyFileName, logger),
+            "Failed to load enrich policies in parallel"
+        );
+
+    }
 
-        RuntimeException exception = null;
-        for (Future<?> future : futures) {
+    private static <T> void executeInParallel(List<T> items, IOConsumer<T> consumer, String errorMessage) {
+        Semaphore semaphore = new Semaphore(PARALLEL_THREADS);
+        ESTestCase.runInParallel(items.size(), i -> {
             try {
-                future.get();
-            } catch (Exception e) {
-                exception = ExceptionsHelper.useOrSuppress(exception, ExceptionsHelper.convertToRuntime(e));
+                semaphore.acquire();
+                consumer.accept(items.get(i));
+            } catch (IOException | InterruptedException e) {
+                throw new RuntimeException(errorMessage, e);
+            } finally {
+                semaphore.release();
             }
-        }
-
-        if (exception != null) {
-            throw new IOException(errorMessage, exception);
-        }
+        });
     }
 
     public static void createInferenceEndpoints(RestClient client) throws IOException {
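A note on the pattern, for readers outside the Elasticsearch test framework: ESTestCase.runInParallel starts one task per item, so the Semaphore is what actually caps concurrency at PARALLEL_THREADS, and permits are released in a finally block so one failed dataset cannot starve the remaining tasks. One visible behavior change is that the helper no longer declares IOException; failures now surface as a RuntimeException carrying the error message. Below is a minimal standalone sketch of the same bounded-parallelism shape; BoundedParallelSketch and its thread-per-task runInParallel are illustrative stand-ins for the ESTestCase utility, not its implementation, and the PARALLEL_THREADS value of 10 is an assumption.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.function.Consumer;
import java.util.function.IntConsumer;

final class BoundedParallelSketch {

    // Assumption: stands in for the loader's PARALLEL_THREADS constant.
    private static final int PARALLEL_THREADS = 10;

    // Hypothetical stand-in for ESTestCase.runInParallel: spawn one thread
    // per task, then join them all before returning.
    static void runInParallel(int tasks, IntConsumer task) {
        List<Thread> threads = new ArrayList<>();
        for (int i = 0; i < tasks; i++) {
            final int index = i;
            Thread thread = new Thread(() -> task.accept(index));
            thread.start();
            threads.add(thread);
        }
        for (Thread thread : threads) {
            try {
                thread.join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            }
        }
    }

    // Same shape as the PR's executeInParallel: one task is started per item,
    // but the semaphore caps how many run their body at the same time.
    static <T> void executeInParallel(List<T> items, Consumer<T> consumer, String errorMessage) {
        Semaphore semaphore = new Semaphore(PARALLEL_THREADS);
        runInParallel(items.size(), i -> {
            try {
                semaphore.acquire();
                consumer.accept(items.get(i));
            } catch (InterruptedException e) {
                throw new RuntimeException(errorMessage, e);
            } finally {
                semaphore.release();
            }
        });
    }

    public static void main(String[] args) {
        executeInParallel(
            List.of("index-a", "index-b", "index-c"),
            item -> System.out.println("loaded " + item),
            "Failed to load data in parallel"
        );
    }
}

The design point is that the runner sizes its pool to the task count, so the semaphore, not the pool, bounds how many bulk requests are in flight at once.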