From 87c16ee1de04a8db5c251dbff400349939aaee32 Mon Sep 17 00:00:00 2001 From: Mashhur <99575341+mashhurs@users.noreply.github.com> Date: Thu, 12 Dec 2024 13:48:00 -0800 Subject: [PATCH 1/2] [8.x] Backport of #190, dynamic CI pr/build/e2e jobs for target branch. (#200) * Sync up with ES main on `PipelineConfiguration` class changes. (#190) * Sync up with ES main that PipelineConfiguration class getConfigAsMap() method renamed to getConfig(). * Request modifiable donfig when creating a pipeline. * Fixes PR CI jobs, adds build and E2E CI jobs. * Fix the E2E test jobs. * Generate CI steps based on defined test matrix. * Apply suggestions from code review - print target branch for debugging purpose - add return type `None` to void method. Co-authored-by: Cas Donoghue * Replace the matrix with origin URL (after landing). --------- Co-authored-by: Cas Donoghue (cherry picked from commit 8e4c02a53455f176d1f86589a22d53888b56094e) --- .buildkite/build-pipeline.yml | 31 +++- .buildkite/e2e-pipeline.yml | 31 +++- .buildkite/pull-request-pipeline.yml | 82 +++------ .../scripts/build-pipeline/generate-steps.py | 102 +++++++++++ .buildkite/scripts/e2e-pipeline/README.md | 56 ++++++ .buildkite/scripts/e2e-pipeline/__init__.py | 0 .buildkite/scripts/e2e-pipeline/bootstrap.py | 165 ++++++++++++++++++ .../scripts/e2e-pipeline/config/pipeline.conf | 32 ++++ .../config/serverless_pipeline.conf | 32 ++++ .../scripts/e2e-pipeline/generate-steps.py | 86 +++++++++ .../scripts/e2e-pipeline/logstash_stats.py | 16 ++ .buildkite/scripts/e2e-pipeline/main.py | 60 +++++++ .../scripts/e2e-pipeline/plugin_test.py | 61 +++++++ .../scripts/e2e-pipeline/requirements.txt | 2 + .buildkite/scripts/e2e-pipeline/util.py | 32 ++++ .../pull-request-pipeline/generate-steps.py | 111 ++++++++++++ .buildkite/scripts/run_e2e_tests.sh | 53 ++++++ .buildkite/scripts/run_tests.sh | 14 -- gradle.properties | 2 +- .../IngestPipelineFactory.java | 2 +- ...archPipelineConfigurationResolverTest.java | 4 +- 
.../PipelineConfigurationFactoryTest.java | 10 +- 22 files changed, 906 insertions(+), 78 deletions(-) create mode 100644 .buildkite/scripts/build-pipeline/generate-steps.py create mode 100644 .buildkite/scripts/e2e-pipeline/README.md create mode 100644 .buildkite/scripts/e2e-pipeline/__init__.py create mode 100644 .buildkite/scripts/e2e-pipeline/bootstrap.py create mode 100644 .buildkite/scripts/e2e-pipeline/config/pipeline.conf create mode 100644 .buildkite/scripts/e2e-pipeline/config/serverless_pipeline.conf create mode 100644 .buildkite/scripts/e2e-pipeline/generate-steps.py create mode 100644 .buildkite/scripts/e2e-pipeline/logstash_stats.py create mode 100644 .buildkite/scripts/e2e-pipeline/main.py create mode 100644 .buildkite/scripts/e2e-pipeline/plugin_test.py create mode 100644 .buildkite/scripts/e2e-pipeline/requirements.txt create mode 100644 .buildkite/scripts/e2e-pipeline/util.py create mode 100644 .buildkite/scripts/pull-request-pipeline/generate-steps.py create mode 100755 .buildkite/scripts/run_e2e_tests.sh diff --git a/.buildkite/build-pipeline.yml b/.buildkite/build-pipeline.yml index 020bae2e..9b4193d8 100644 --- a/.buildkite/build-pipeline.yml +++ b/.buildkite/build-pipeline.yml @@ -1 +1,30 @@ -# Intentional empty definition because it is in 8.x \ No newline at end of file +- label: "Build pipeline" + command: | + #!/usr/bin/env bash + set -eo pipefail + + echo "--- Downloading prerequisites" + python3 -m pip install ruamel.yaml + python3 -m pip install requests + curl -fsSL --retry-max-time 60 --retry 3 --retry-delay 5 -o /usr/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64 + chmod a+x /usr/bin/yq + + export TARGET_BRANCH="$BUILDKITE_BRANCH" + + echo "--- Generating steps dynamically" + set +e + python3 .buildkite/scripts/build-pipeline/generate-steps.py > pipeline_steps.yml + if [[ $$? -ne 0 ]]; then + echo "^^^ +++" + echo "There was a problem with generating pipeline steps." 
+ cat pipeline_steps.yml + echo "Exiting now." + exit 1 + else + set -eo pipefail + cat pipeline_steps.yml | yq . + fi + + set -eo pipefail + echo "--- Uploading steps to buildkite" + cat pipeline_steps.yml | buildkite-agent pipeline upload diff --git a/.buildkite/e2e-pipeline.yml b/.buildkite/e2e-pipeline.yml index 020bae2e..76340ab0 100644 --- a/.buildkite/e2e-pipeline.yml +++ b/.buildkite/e2e-pipeline.yml @@ -1 +1,30 @@ -# Intentional empty definition because it is in 8.x \ No newline at end of file +- label: "E2E pipeline" + command: | + #!/usr/bin/env bash + set -eo pipefail + + echo "--- Downloading prerequisites" + python3 -m pip install ruamel.yaml + python3 -m pip install requests + curl -fsSL --retry-max-time 60 --retry 3 --retry-delay 5 -o /usr/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64 + chmod a+x /usr/bin/yq + + export TARGET_BRANCH="$BUILDKITE_BRANCH" + + echo "--- Generating steps dynamically" + set +e + python3 .buildkite/scripts/e2e-pipeline/generate-steps.py > pipeline_steps.yml + if [[ $$? -ne 0 ]]; then + echo "^^^ +++" + echo "There was a problem with generating pipeline steps." + cat pipeline_steps.yml + echo "Exiting now." + exit 1 + else + set -eo pipefail + cat pipeline_steps.yml | yq . 
+ fi + + set -eo pipefail + echo "--- Uploading steps to buildkite" + cat pipeline_steps.yml | buildkite-agent pipeline upload diff --git a/.buildkite/pull-request-pipeline.yml b/.buildkite/pull-request-pipeline.yml index 0cb5f0f4..c942b3c9 100644 --- a/.buildkite/pull-request-pipeline.yml +++ b/.buildkite/pull-request-pipeline.yml @@ -1,53 +1,29 @@ -# define a GCP VM agent to support container management (by default agent doesn't support) -agents: - provider: "gcp" - machineType: "n1-standard-4" - image: family/core-ubuntu-2204 - -steps: - # ------------- Unit tests --------------------- - - label: ":hammer: Unit tests with LS & ES 8.current :docker:" - # Builds the plugin (with current changes) with the version defined in gradle.properties - # and against LS and ES 8.current - # Runs unit tests on LS & ES 8.current docker - command: - - .buildkite/scripts/run_tests.sh - env: - ELASTIC_STACK_VERSION: "8.current" - INTEGRATION: false - - - label: ":hammer: Unit tests with LS & ES 8.current-SNAPSHOT :docker:" - # Builds the plugin (with current changes) with the version defined in gradle.properties - # and against LS and ES 8.current-SNAPSHOT - # Runs unit tests on LS & ES 8.current-SNAPSHOT docker - command: - - .buildkite/scripts/run_tests.sh - env: - ELASTIC_STACK_VERSION: "8.current" - SNAPSHOT: true - INTEGRATION: false - - # ------------- Integration tests --------------------- - - label: ":hammer: Integration tests with LS & ES 8.current :docker:" - # Builds the plugin (with current changes) with the version defined in gradle.properties - # and against LS and ES 8.current - # Runs integration tests on LS & ES 8.current docker - command: - - .buildkite/scripts/run_tests.sh - env: - ELASTIC_STACK_VERSION: "8.current" - INTEGRATION: true - SECURE_INTEGRATION: true - - - label: ":hammer: Integration tests with LS & ES 8.current-SNAPSHOT :docker:" - # Builds the plugin (with current changes) with the version defined in gradle.properties - # and against LS and ES 
8.current-SNAPSHOT - # Runs integration tests on LS & ES 8.current-SNAPSHOT docker - command: - - .buildkite/scripts/run_tests.sh - env: - ELASTIC_STACK_VERSION: "8.current" - SNAPSHOT: true - INTEGRATION: true - SECURE_INTEGRATION: true - LOG_LEVEL: "info" +- label: "Pull request pipeline" + command: | + #!/usr/bin/env bash + set -eo pipefail + echo "--- Downloading prerequisites" + python3 -m pip install ruamel.yaml + python3 -m pip install requests + curl -fsSL --retry-max-time 60 --retry 3 --retry-delay 5 -o /usr/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64 + chmod a+x /usr/bin/yq + + export TARGET_BRANCH="$GITHUB_PR_TARGET_BRANCH" + + echo "--- Generating steps dynamically" + set +e + python3 .buildkite/scripts/pull-request-pipeline/generate-steps.py > pipeline_steps.yml + if [[ $$? -ne 0 ]]; then + echo "^^^ +++" + echo "There was a problem with generating pipeline steps." + cat pipeline_steps.yml + echo "Exiting now." + exit 1 + else + set -eo pipefail + cat pipeline_steps.yml | yq . 
+ fi + + set -eo pipefail + echo "--- Uploading steps to buildkite" + cat pipeline_steps.yml | buildkite-agent pipeline upload diff --git a/.buildkite/scripts/build-pipeline/generate-steps.py b/.buildkite/scripts/build-pipeline/generate-steps.py new file mode 100644 index 00000000..3c166410 --- /dev/null +++ b/.buildkite/scripts/build-pipeline/generate-steps.py @@ -0,0 +1,102 @@ +import os +import requests +import sys +import typing +from requests.adapters import HTTPAdapter, Retry + +from ruamel.yaml import YAML + +RELEASES_URL = "https://raw.githubusercontent.com/elastic/logstash/main/ci/logstash_releases.json" +TEST_COMMAND: typing.final = ".buildkite/scripts/run_tests.sh" + + +def generate_unit_and_integration_test_steps(stack_version, snapshot) -> list[typing.Any]: + test_steps = [] + + # step-1, unit tests + label_unit_test: typing.final = f"Unit test for {stack_version}, snapshot: {snapshot}" + test_steps.append({ + "label": label_unit_test, + "command": TEST_COMMAND, + "env": { + "SNAPSHOT": snapshot, + "ELASTIC_STACK_VERSION": stack_version, + "INTEGRATION": "false" + } + }) + + # step-2, integration tests + label_integration_test: typing.final = f"Integration test for {stack_version}, snapshot: {snapshot}" + test_steps.append({ + "label": label_integration_test, + "command": TEST_COMMAND, + "env": { + "SNAPSHOT": snapshot, + "ELASTIC_STACK_VERSION": stack_version, + "INTEGRATION": "true", + "SECURE_INTEGRATION": "true", + "LOG_LEVEL": "info" + } + }) + return test_steps + + +def call_url_with_retry(url: str, max_retries: int = 5, delay: int = 1) -> requests.Response: + schema = "https://" if "https://" in url else "http://" + session = requests.Session() + # retry on most common failures such as connection timeout(408), etc... 
+ retries = Retry(total=max_retries, backoff_factor=delay, status_forcelist=[408, 502, 503, 504]) + session.mount(schema, HTTPAdapter(max_retries=retries)) + return session.get(url) + + +if __name__ == "__main__": + structure = { + "agents": { + "provider": "gcp", + "machineType": "n1-standard-4", + "image": "family/core-ubuntu-2204" + }, + "steps": []} + + steps = [] + response = call_url_with_retry(RELEASES_URL) + versions_json = response.json() + + # there are situations to manually run CIs with PR change, + # set MANUAL_TARGET_BRANCH with upstream target branch and run + manually_set_target_branch: typing.final = os.getenv("MANUAL_TARGET_BRANCH") + target_branch: typing.final = manually_set_target_branch if manually_set_target_branch else os.getenv("TARGET_BRANCH") + print(f"Running with target_branch: {target_branch}") + if target_branch == '8.x': + full_stack_version: typing.final = versions_json["snapshots"]["8.future"] + steps += generate_unit_and_integration_test_steps(full_stack_version, "true") + elif target_branch == 'main': + full_stack_version: typing.final = versions_json["snapshots"][target_branch] + steps += generate_unit_and_integration_test_steps(full_stack_version, "true") + else: + # generate steps for the version if released + releases = versions_json["releases"] + for release_version in releases: + if releases[release_version].startswith(target_branch): + steps += generate_unit_and_integration_test_steps(releases[release_version], "false") + break + + # steps for snapshot version + snapshots = versions_json["snapshots"] + for snapshot_version in snapshots: + if snapshots[snapshot_version].startswith(target_branch): + steps += generate_unit_and_integration_test_steps(snapshots[snapshot_version], "false") + break + + group_desc = f"{target_branch} branch steps" + key_desc = "pr-and-build-steps" + structure["steps"].append({ + "group": group_desc, + "key": key_desc, + "steps": steps + }) + + print( + '# yaml-language-server: 
$schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json') + YAML().dump(structure, sys.stdout) diff --git a/.buildkite/scripts/e2e-pipeline/README.md b/.buildkite/scripts/e2e-pipeline/README.md new file mode 100644 index 00000000..f62be8fd --- /dev/null +++ b/.buildkite/scripts/e2e-pipeline/README.md @@ -0,0 +1,56 @@ + +## Getting started +E2E tests can be also run on your local machine. + +### Prerequisites + +#### Building the plugin +E2E tries to install the plugin on Logstash container from the gem file. To generate a gem: +1. Building plugin requires Logstash +- If you have `LOGSTASH_PATH` already defined, skip this step +- You can also download Logstash and export its path to `LOGSTASH_PATH` +- OR build from source + ```shell + git clone --single-branch https://github.com/elastic/logstash.git + cd logstash && ./gradlew clean bootstrap assemble installDefaultGems && cd .. + LOGSTASH_PATH=$(pwd)/logstash + export LOGSTASH_PATH + ``` +2. Run the following command: +```shell +./gradlew clean vendor localGem +``` + +#### Defining a project type +If you want to run tests with serverless, this will be for you. +Defaults to `on_prems` where local stack containers will be spun up and tested. +```bash +export E2E_PROJECT_TYPE="serverless" +``` + +In order to run tests with serverless, you also need to export `EC_API_KEY` which is an organization API key to create a project. +In the pipelines, this will be automatically retrieved from Vault services. + +#### Stack version +E2E also requires `ELASTIC_STACK_VERSION` (ex: "8.12.0") environment variable in order to test against. +Make sure to export it before running. In the Buildkite pipeline, this var will be resolved and exported. 
+
+#### Installing dependencies
+Make sure you have python installed on your local
+```bash
+pip install -r .buildkite/scripts/e2e-pipeline/requirements.txt
+```
+
+### Run
+Run the following command from the repo dir:
+```bash
+python3 .buildkite/scripts/e2e-pipeline/main.py
+```
+
+## Troubleshooting
+- The project retries on some operations to overcome timeout issues, uses [`retry` tool](https://formulae.brew.sh/formula/retry). If you get `retry` undefined error, make sure to install it.
+```
+brew install retry
+```
+
+- If you run multiple times, currently cloning `integrations` repo will fail, so make sure to remove the repo folder or comment out the `bootstrap.py#__clone_integrations_repo()` method
diff --git a/.buildkite/scripts/e2e-pipeline/__init__.py b/.buildkite/scripts/e2e-pipeline/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/.buildkite/scripts/e2e-pipeline/bootstrap.py b/.buildkite/scripts/e2e-pipeline/bootstrap.py
new file mode 100644
index 00000000..3f6462aa
--- /dev/null
+++ b/.buildkite/scripts/e2e-pipeline/bootstrap.py
@@ -0,0 +1,165 @@
+"""
+E2E bootstrapping with Python script
+ - A script to resolve stack versions
+ - Download and spin up elastic-package
+ - Clone integrations repo and prepare packages
+ - When E2E finishes, teardown the stack
+"""
+import os
+import sys
+import time
+import util
+
+
+class Bootstrap:
+ ELASTIC_PACKAGE_DISTRO_URL = "https://api.github.com/repos/elastic/elastic-package/releases/latest"
+ LOGSTASH_CONTAINER_NAME = "elastic-package-stack-e2e-logstash-1"
+ PLUGIN_NAME = "logstash-filter-elastic_integration"
+
+ def __init__(self, stack_version: str, project_type: str) -> None:
+ f"""
+ A constructor of the {Bootstrap}.
+ + Args: + stack_version: An Elastic stack version where {Bootstrap} spins up with + project_type: type of the project running with, example serverless or on-prems + + Returns: + Validates and sets stack version, project type and resolves elastic package distro based on running OS (sys.platform) + """ + self.stack_version = stack_version + self.__validate_and_set_project_type(project_type) + self.__resolve_distro() + + def __validate_and_set_project_type(self, project_type: str) -> None: + project_types = ["on_prems", "serverless"] + if project_type not in project_types: + raise ValueError(f"project_type accepts {project_types} only") + self.project_type = project_type + print(f"Project type: {project_type}") + + def __resolve_distro(self) -> None: + print(f"Platform: {sys.platform}") + platforms = ["darwin", "linux"] + if sys.platform not in platforms: + raise ValueError(f"Unsupported {sys.platform}, E2E can on {platforms} only") + self.distro = "darwin_amd64.tar.gz" if sys.platform == "darwin" else "linux_amd64.tar.gz" + + def __download_elastic_package(self) -> None: + response = util.call_url_with_retry(self.ELASTIC_PACKAGE_DISTRO_URL) + release_info = response.json() + + download_urls = [asset["browser_download_url"] for asset in release_info["assets"]] + download_url = [url for url in download_urls if self.distro in url][0] + if download_url.strip() == '': + raise Exception(f"Could not resolve elastic-package distro download URL, release info: {release_info}.") + + file_name = "downloaded_elastic_package_" + self.distro + # downloading with `urllib3` gives a different size file which causes a corrupted file issue + util.run_or_raise_error(["curl", "-o", file_name, "--retry", "5", "--retry-delay", "5", "-fSL", download_url], + "Failed to download elastic-package") + print("elastic-package is successfully downloaded.") + + # Extract the downloaded tar.gz file + util.run_or_raise_error(["tar", "zxf", file_name], + f"Error occurred while unzipping {file_name}") 
+ + def __make_elastic_package_global(self) -> None: + util.run_or_raise_error(["sudo", "mv", "elastic-package", "/usr/local/bin"], + "Could not make `elastic-package` global") + + def __clone_integrations_repo(self) -> None: + util.run_or_raise_error(["retry", "-t", "3", "--", "git", "clone", "--single-branch", + "https://github.com/elastic/integrations.git"], + "Error occurred while cloning an integrations repo. Check logs for details.") + + def __get_profile_path(self) -> str: + return os.path.join(util.get_home_path(), ".elastic-package/profiles/e2e") + + def __create_config_file(self, sample_config_file: str, config_file: str) -> None: + with open(sample_config_file, "r") as infile, open(config_file, "w") as outfile: + for line in infile: + if "stack.logstash_enabled: true" in line: + # Logstash is disabled by default, remove the comment + line = line.lstrip('#').lstrip() + outfile.write(line) + + def __setup_elastic_package_profile(self) -> None: + # Although profile doesn't exist, profile delete process will get succeeded. + util.run_or_raise_error(["elastic-package", "profiles", "delete", "e2e"], + "Error occurred while deleting and then creating a profile. Check logs for details.") + + util.run_or_raise_error(["elastic-package", "profiles", "create", "e2e"], + "Error occurred while creating a profile. Check logs for details.") + + print("`elastic-package` e2e profile created.") + + # elastic-package creates a profile under home directory + config_example_file = os.path.join(self.__get_profile_path(), "config.yml.example") + config_file = os.path.join(self.__get_profile_path(), "config.yml") + self.__create_config_file(config_example_file, config_file) + util.run_or_raise_error(["elastic-package", "profiles", "use", "e2e"], + "Error occurred while creating a profile. 
Check logs for details.") + + def __install_plugin(self) -> None: + with open("VERSION", "r") as f: + version = f.read() + + plugin_name = f"logstash-filter-elastic_integration-{version.strip()}-java.gem" + util.run_or_raise_error(["docker", "cp", plugin_name, f"{self.LOGSTASH_CONTAINER_NAME}:/usr/share/logstash"], + "Error occurred while copying plugin to container, see logs for details.") + + print("Installing logstash-filter-elastic_integration plugin...") + util.run_or_raise_error( + ["docker", "exec", self.LOGSTASH_CONTAINER_NAME, "bin/logstash-plugin", "install", plugin_name], + "Error occurred installing plugin in Logstash container") + print("Plugin installed successfully.") + + def __reload_container(self) -> None: + print("Restarting Logstash container after installing the plugin.") + util.run_or_raise_error(["docker", "restart", f"{self.LOGSTASH_CONTAINER_NAME}"], + "Error occurred while reloading Logstash container, see logs for details.") + time.sleep(20) # give a time Logstash pipeline to fully start + + def __update_pipeline_config(self) -> None: + local_config_file_path = ".buildkite/scripts/e2e-pipeline/config/" + config_file = "serverless_pipeline.conf" if self.project_type == "serverless" else "pipeline.conf" + local_config_file = local_config_file_path + config_file + container_config_file_path = "/usr/share/logstash/pipeline/logstash.conf" + # python docker client (internally uses subprocess) requires special TAR header with tar operations + util.run_or_raise_error(["docker", "cp", f"{local_config_file}", + f"{self.LOGSTASH_CONTAINER_NAME}:{container_config_file_path}"], + "Error occurred while replacing pipeline config, see logs for details") + time.sleep(20) # give a time Logstash pipeline to fully start + + def __spin_stack(self) -> None: + try: + # elastic-package stack up -d --version "${ELASTIC_STACK_VERSION}" + commands = ["elastic-package", "stack", "up", "-d", "--version", self.stack_version] + if self.project_type == "serverless": + 
commands.extend(["--provider", "serverless"]) + util.run_or_raise_error(commands, + "Error occurred while running stacks with elastic-package. Check logs for details.") + except Exception as ex: + self.__teardown_stack() # some containers left running, make sure to stop them + + def __teardown_stack(self) -> None: + util.run_or_raise_error(["elastic-package", "stack", "down"], + "Error occurred while stopping stacks with elastic-package. Check logs for details.") + + def run_elastic_stack(self) -> None: + """ + Downloads elastic-package, creates a profile and runs ELK, Fleet, ERP and elastic-agent + """ + self.__download_elastic_package() + self.__make_elastic_package_global() + self.__clone_integrations_repo() + self.__setup_elastic_package_profile() + self.__spin_stack() + self.__install_plugin() + self.__reload_container() + self.__update_pipeline_config() + + def stop_elastic_stack(self) -> None: + print(f"Stopping elastic-package stack...") + self.__teardown_stack() diff --git a/.buildkite/scripts/e2e-pipeline/config/pipeline.conf b/.buildkite/scripts/e2e-pipeline/config/pipeline.conf new file mode 100644 index 00000000..2849b02b --- /dev/null +++ b/.buildkite/scripts/e2e-pipeline/config/pipeline.conf @@ -0,0 +1,32 @@ +input { + elastic_agent { + port => 5044 + ssl_enabled => true + ssl_certificate_authorities => ["/usr/share/logstash/config/certs/ca-cert.pem"] + ssl_certificate => "/usr/share/logstash/config/certs/cert.pem" + ssl_key => "/usr/share/logstash/config/certs/key.pem" + } +} + + +filter { + elastic_integration { + remove_field => ['@version'] + hosts => ["https://elasticsearch:9200"] + username => "${ELASTIC_USER}" + password => "${ELASTIC_PASSWORD}" + ssl_enabled => true + ssl_verification_mode => "none" + } +} + + +output { + elasticsearch { + hosts => ["https://elasticsearch:9200"] + user => "${ELASTIC_USER}" + password => "${ELASTIC_PASSWORD}" + ssl_enabled => true + ssl_verification_mode => "none" + } +} diff --git 
a/.buildkite/scripts/e2e-pipeline/config/serverless_pipeline.conf b/.buildkite/scripts/e2e-pipeline/config/serverless_pipeline.conf new file mode 100644 index 00000000..328a7634 --- /dev/null +++ b/.buildkite/scripts/e2e-pipeline/config/serverless_pipeline.conf @@ -0,0 +1,32 @@ +input { + elastic_agent { + port => 5044 + ssl_enabled => true + ssl_certificate_authorities => ["/usr/share/logstash/config/certs/ca-cert.pem"] + ssl_certificate => "/usr/share/logstash/config/certs/cert.pem" + ssl_key => "/usr/share/logstash/config/certs/key.pem" + } +} + + +filter { + elastic_integration { + remove_field => ['@version'] + hosts => ["${ELASTIC_HOSTS}"] + username => "${ELASTIC_USER}" + password => "${ELASTIC_PASSWORD}" + ssl_enabled => true + ssl_verification_mode => "none" + } +} + + +output { + elasticsearch { + hosts => ["${ELASTIC_HOSTS}"] + user => "${ELASTIC_USER}" + password => "${ELASTIC_PASSWORD}" + ssl_enabled => true + ssl_verification_mode => "none" + } +} diff --git a/.buildkite/scripts/e2e-pipeline/generate-steps.py b/.buildkite/scripts/e2e-pipeline/generate-steps.py new file mode 100644 index 00000000..33fe1f80 --- /dev/null +++ b/.buildkite/scripts/e2e-pipeline/generate-steps.py @@ -0,0 +1,86 @@ +import os +import requests +import sys +import typing +from requests.adapters import HTTPAdapter, Retry + +from ruamel.yaml import YAML + +RELEASES_URL = "https://raw.githubusercontent.com/elastic/logstash/main/ci/logstash_releases.json" +TEST_COMMAND: typing.final = ".buildkite/scripts/run_e2e_tests.sh" + + +def call_url_with_retry(url: str, max_retries: int = 5, delay: int = 1) -> requests.Response: + schema = "https://" if "https://" in url else "http://" + session = requests.Session() + # retry on most common failures such as connection timeout(408), etc... 
+ retries = Retry(total=max_retries, backoff_factor=delay, status_forcelist=[408, 502, 503, 504]) + session.mount(schema, HTTPAdapter(max_retries=retries)) + return session.get(url) + + +def generate_test_step(stack_version, es_treeish, snapshot) -> dict: + label_integration_test: typing.final = f"E2E tests for {stack_version}, snapshot: {snapshot}" + return { + "label": label_integration_test, + "command": TEST_COMMAND, + "env": { + "SNAPSHOT": snapshot, + "ELASTIC_STACK_VERSION": stack_version, + "ELASTICSEARCH_TREEISH": es_treeish + } + } + + +if __name__ == "__main__": + structure = { + "agents": { + "provider": "gcp", + "machineType": "n2-standard-4", + "imageProject": "elastic-images-prod", + "image": "family/platform-ingest-logstash-multi-jdk-ubuntu-2204", + "diskSizeGb": 120 + }, + "steps": []} + + steps = [] + response = call_url_with_retry(RELEASES_URL) + versions_json = response.json() + + # there are situations to manually run CIs with PR change, + # set MANUAL_TARGET_BRANCH with upstream target branch and run + manually_set_target_branch: typing.final = os.getenv("MANUAL_TARGET_BRANCH") + target_branch: typing.final = manually_set_target_branch if manually_set_target_branch else os.getenv("TARGET_BRANCH") + print(f"Running with target_branch: {target_branch}") + if target_branch == '8.x': + full_stack_version: typing.final = versions_json["snapshots"]["8.future"] + steps.append(generate_test_step(full_stack_version, target_branch, "true")) + elif target_branch == 'main': + full_stack_version: typing.final = versions_json["snapshots"][target_branch] + steps.append(generate_test_step(full_stack_version, target_branch, "true")) + else: + # generate steps for the version if released + releases = versions_json["releases"] + for release_version in releases: + if releases[release_version].startswith(target_branch): + steps.append(generate_test_step(releases[release_version], target_branch, "false")) + break + + # steps for snapshot version + snapshots = 
versions_json["snapshots"] + for snapshot_version in snapshots: + if snapshots[snapshot_version].startswith(target_branch): + steps.append(generate_test_step(snapshots[snapshot_version], target_branch, "false")) + break + + group_desc = f"{target_branch} branch E2E steps" + key_desc = "e2e-steps" + structure["steps"].append({ + "group": group_desc, + "key": key_desc, + "steps": steps + }) + + print( + '# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json') + YAML().dump(structure, sys.stdout) diff --git a/.buildkite/scripts/e2e-pipeline/logstash_stats.py b/.buildkite/scripts/e2e-pipeline/logstash_stats.py new file mode 100644 index 00000000..b51fb999 --- /dev/null +++ b/.buildkite/scripts/e2e-pipeline/logstash_stats.py @@ -0,0 +1,16 @@ +""" +A class to provide information about Logstash node stats. +""" + +import util + + +class LogstashStats: + LOGSTASH_STATS_URL = "http://localhost:9600/_node/stats" + + def __init__(self): + pass + + def get(self): + response = util.call_url_with_retry(self.LOGSTASH_STATS_URL) + return response.json() diff --git a/.buildkite/scripts/e2e-pipeline/main.py b/.buildkite/scripts/e2e-pipeline/main.py new file mode 100644 index 00000000..de6d5ca6 --- /dev/null +++ b/.buildkite/scripts/e2e-pipeline/main.py @@ -0,0 +1,60 @@ +""" + Main entry point of the E2E test suites +""" + +import os +from bootstrap import Bootstrap +from plugin_test import PluginTest +import util + +INTEGRATION_PACKAGES_TO_TEST = ["apache", "m365_defender", "nginx", "tomcat"] + + +class BootstrapContextManager: + def __enter__(self): + stack_version = os.environ.get("ELASTIC_STACK_VERSION") + project_type = os.environ.get("E2E_PROJECT_TYPE", "on_prems") + if stack_version is None: + raise Exception("ELASTIC_STACK_VERSION environment variable is missing, please export and try again.") + + print(f"Starting E2E test of Logstash running Elastic Integrations against {stack_version} version.") + self.bootstrap = 
Bootstrap(stack_version, project_type) + self.bootstrap.run_elastic_stack() + return self.bootstrap + + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is not None: + traceback.print_exception(exc_type, exc_value, traceback) + + if self.bootstrap: + self.bootstrap.stop_elastic_stack() + + +def main(): + failed_packages = [] + + with BootstrapContextManager() as bootstrap: + working_dir = os.getcwd() + test_plugin = PluginTest() + for package in INTEGRATION_PACKAGES_TO_TEST: + try: + os.chdir(f"{working_dir}/integrations/packages/{package}") + test_plugin.on(package) + except Exception as e: + print(f"Test failed for {package} with {e}.") + failed_packages.append(package) + + container = util.get_logstash_container() + + # pretty printing + print(f"Logstash docker container logs..") + ls_container_logs = container.logs().decode('utf-8') + for log_line in ls_container_logs.splitlines(): + print(log_line) + + if len(failed_packages) > 0: + raise Exception(f"Following packages failed: {failed_packages}") + + +if __name__ == "__main__": + main() diff --git a/.buildkite/scripts/e2e-pipeline/plugin_test.py b/.buildkite/scripts/e2e-pipeline/plugin_test.py new file mode 100644 index 00000000..5d962e6a --- /dev/null +++ b/.buildkite/scripts/e2e-pipeline/plugin_test.py @@ -0,0 +1,61 @@ +""" +A class to validate the Integration Plugin with a given integration package +""" +import subprocess +from logstash_stats import LogstashStats + + +class PluginTest: + logstash_stats_api = LogstashStats() + LAST_PROCESSED_EVENTS = {"in": 0, "out": 0} + + def __init__(self): + pass + + def __analyze_logstash_throughput(self, package: str, elastic_package_result: subprocess.CompletedProcess) -> None: + pipeline_stats = self.logstash_stats_api.get()["pipelines"]["main"] + integration_stats = [item for item in pipeline_stats.get("plugins", {}).get("filters", []) if + item.get("name") == "elastic_integration"] + if len(integration_stats) <= 0: + raise Exception("Could not 
fetch elastic integration plugin stats.") + + processed_events = integration_stats[0]["events"] + print(f"Found integration plugin current event stats: {processed_events}") + in_events = processed_events["in"] - self.LAST_PROCESSED_EVENTS["in"] + out_events = processed_events["out"] - self.LAST_PROCESSED_EVENTS["out"] + + if out_events == 0: + if elastic_package_result.returncode != 0: + raise Exception(f"events not processed, events, in: {in_events}, out: {out_events}") + else: + print("WARN: `elastic_integration` plugin didn't output events. This may happen when ingest pipeline " + "cancel the events or `elasticsearch-output` failed, check Logstash docker logs.") + if in_events != out_events: + if elastic_package_result.returncode != 0: + raise Exception(f"processed events are not equal, events, in: {in_events}, out: {out_events}") + else: + print("WARN: in and out event count in `elastic_integration` differ. This may happen when ingest " + "pipeline cancel some events or `elasticsearch-output` failed, check Logstash docker logs.") + + print(f"Test succeeded with: {package}") + self.LAST_PROCESSED_EVENTS = processed_events + + def on(self, package: str) -> None: + print(f"Testing the package: {package}") + + # `elastic-package test system` deploys current package + # emits the data stream events, the process finishes when the package sends all available events + # note that `elastic-package test pipeline` is for validation purpose only + result = subprocess.run(["elastic-package", "test", "system"], universal_newlines=True, stdout=subprocess.PIPE) + if result.returncode != 0: + # elastic-package also checks ES index if event is arrived, and compares with exp + # sometimes tests may fail because of multiple reasons: timeout, + # ecs needs to be disabled since event already has `event.original`, etc... 
+ print(f"Internal failure happened with {package}, process return code: {result.returncode}.") + + if result.stdout: + # print line by line for better visibility + for result_line in result.stdout.splitlines(): print(f"{result_line}") + + # although there was an error, le's check how LS performed and make sure errors weren't because of Logstash + self.__analyze_logstash_throughput(package, result) diff --git a/.buildkite/scripts/e2e-pipeline/requirements.txt b/.buildkite/scripts/e2e-pipeline/requirements.txt new file mode 100644 index 00000000..8301cfcf --- /dev/null +++ b/.buildkite/scripts/e2e-pipeline/requirements.txt @@ -0,0 +1,2 @@ +requests==2.31.0 +docker==7.0.0 \ No newline at end of file diff --git a/.buildkite/scripts/e2e-pipeline/util.py b/.buildkite/scripts/e2e-pipeline/util.py new file mode 100644 index 00000000..22c3893b --- /dev/null +++ b/.buildkite/scripts/e2e-pipeline/util.py @@ -0,0 +1,32 @@ +import docker +import os +import requests +import subprocess +from docker.models.containers import Container +from requests.adapters import HTTPAdapter, Retry + + +def get_home_path() -> str: + return os.path.expanduser("~") + + +def call_url_with_retry(url: str, max_retries: int = 5, delay: int = 1) -> requests.Response: + schema = "https://" if "https://" in url else "http://" + session = requests.Session() + # retry on most common failures such as connection timeout(408), etc... 
+ retries = Retry(total=max_retries, backoff_factor=delay, status_forcelist=[408, 502, 503, 504]) + session.mount(schema, HTTPAdapter(max_retries=retries)) + return session.get(url) + + +def get_logstash_container() -> Container: + client = docker.from_env() + return client.containers.get("elastic-package-stack-e2e-logstash-1") + + +def run_or_raise_error(commands: list, error_message): + result = subprocess.run(commands, universal_newlines=True, stdout=subprocess.PIPE) + if result.returncode != 0: + full_error_message = (error_message + ", output: " + result.stdout.decode('utf-8')) \ + if result.stdout else error_message + raise Exception(f"{full_error_message}") diff --git a/.buildkite/scripts/pull-request-pipeline/generate-steps.py b/.buildkite/scripts/pull-request-pipeline/generate-steps.py new file mode 100644 index 00000000..de431089 --- /dev/null +++ b/.buildkite/scripts/pull-request-pipeline/generate-steps.py @@ -0,0 +1,111 @@ +import os +import requests +import sys +import typing +from requests.adapters import HTTPAdapter, Retry + +from ruamel.yaml import YAML + +RELEASES_URL = "https://raw.githubusercontent.com/elastic/logstash/main/ci/logstash_releases.json" +TEST_MATRIX_URL = "https://raw.githubusercontent.com/elastic/logstash-filter-elastic_integration/main/.buildkite/pull-request-test-matrix.yml" +TEST_COMMAND: typing.final = ".buildkite/scripts/run_tests.sh" + + +def generate_unit_and_integration_test_steps(stack_version, snapshot) -> list[typing.Any]: + test_steps = [] + + # step-1, unit tests + label_unit_test: typing.final = f"Unit test for {stack_version}, snapshot: {snapshot}" + test_steps.append({ + "label": label_unit_test, + "command": TEST_COMMAND, + "env": { + "SNAPSHOT": snapshot, + "ELASTIC_STACK_VERSION": stack_version, + "INTEGRATION": "false" + } + }) + + # step-2, integration tests + label_integration_test: typing.final = f"Integration test for {stack_version}, snapshot: {snapshot}" + test_steps.append({ + "label": 
label_integration_test, + "command": TEST_COMMAND, + "env": { + "SNAPSHOT": snapshot, + "ELASTIC_STACK_VERSION": stack_version, + "INTEGRATION": "true", + "SECURE_INTEGRATION": "true", + "LOG_LEVEL": "info" + } + }) + return test_steps + + +def call_url_with_retry(url: str, max_retries: int = 5, delay: int = 1) -> requests.Response: + schema = "https://" if "https://" in url else "http://" + session = requests.Session() + # retry on most common failures such as connection timeout(408), etc... + retries = Retry(total=max_retries, backoff_factor=delay, status_forcelist=[408, 502, 503, 504]) + session.mount(schema, HTTPAdapter(max_retries=retries)) + return session.get(url) + + +def make_matrix_version_key(branch: str) -> str: + branch_parts: typing.final = branch.split(".") + return branch_parts[0] + ".x" + + +if __name__ == "__main__": + structure = { + "agents": { + "provider": "gcp", + "machineType": "n1-standard-4", + "image": "family/core-ubuntu-2204" + }, + "steps": []} + + steps = [] + response = call_url_with_retry(RELEASES_URL) + versions_json = response.json() + + matrix_map = call_url_with_retry(TEST_MATRIX_URL) + matrix_map_yaml = YAML().load(matrix_map.text) + + # there are situations to manually run CIs with PR change, + # set MANUAL_TARGET_BRANCH with upstream target branch and run + manually_set_target_branch: typing.final = os.getenv("MANUAL_TARGET_BRANCH") + target_branch: typing.final = manually_set_target_branch if manually_set_target_branch \ + else os.getenv("TARGET_BRANCH") + print(f"Running with target_branch: {target_branch}") + + matrix_version_key = target_branch if target_branch == "main" else make_matrix_version_key(target_branch) + matrix_releases = matrix_map_yaml.get(matrix_version_key, {}).get("releases", []) + matrix_snapshots = matrix_map_yaml.get(matrix_version_key, {}).get("snapshots", []) + + # let's print what matrix we have got, helps debugging + print(f"matrix_releases: {matrix_releases}") + print(f"matrix_snapshots: 
{matrix_snapshots}") + for matrix_release in matrix_releases: + full_stack_version: typing.final = versions_json["releases"].get(matrix_release) + # noop, if they are declared in the matrix but not in the release + if full_stack_version is not None: + steps += generate_unit_and_integration_test_steps(full_stack_version, "false") + + for matrix_snapshot in matrix_snapshots: + full_stack_version: typing.final = versions_json["snapshots"].get(matrix_snapshot) + # noop, if they are declared in the matrix but not in the snapshot + if full_stack_version is not None: + steps += generate_unit_and_integration_test_steps(full_stack_version, "true") + + group_desc = f"{target_branch} branch steps" + key_desc = "pr-and-build-steps" + structure["steps"].append({ + "group": group_desc, + "key": key_desc, + "steps": steps + }) + + print( + '# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json') + YAML().dump(structure, sys.stdout) diff --git a/.buildkite/scripts/run_e2e_tests.sh b/.buildkite/scripts/run_e2e_tests.sh new file mode 100755 index 00000000..077652c8 --- /dev/null +++ b/.buildkite/scripts/run_e2e_tests.sh @@ -0,0 +1,53 @@ +#!/bin/bash + +set -euo pipefail + +export PATH="/opt/buildkite-agent/.rbenv/bin:/opt/buildkite-agent/.pyenv/bin:$PATH" +eval "$(rbenv init -)" +eval "$(pyenv init -)" + +### +# Set the Java home based on .java-version +set_required_jdk() { + set +o nounset + java_version="$(cat .java-version)" + echo "Required JDK version: $java_version" + if [[ "$java_version" == "17.0" ]]; then + jdk_home="/opt/buildkite-agent/.java/adoptiumjdk_17" + elif [[ "$java_version" == "21.0" ]]; then + jdk_home="/opt/buildkite-agent/.java/adoptiumjdk_21" + else + echo "Unsupported JDK." 
+ exit 1 + fi + + export JAVA_HOME=$jdk_home + export PATH="$jdk_home:$PATH" +} + +### +# Build the plugin, to do so we need Logstash source +build_logstash() { + if [[ -d /usr/local/git-references/git-github-com-elastic-logstash-git ]]; then + retry -t 5 -- git clone -v --reference /usr/local/git-references/git-github-com-elastic-logstash-git -- https://github.com/elastic/logstash.git . + else + retry -t 5 -- git clone --single-branch https://github.com/elastic/logstash.git + fi + + cd logstash && ./gradlew clean bootstrap assemble installDefaultGems && cd .. + LOGSTASH_PATH=$(pwd)/logstash + export LOGSTASH_PATH +} + +build_plugin() { + ./gradlew clean vendor localGem +} + +set_required_jdk +build_logstash +build_plugin + +### +# Install E2E prerequisites and run E2E tests +python3 -mpip install -r .buildkite/scripts/e2e-pipeline/requirements.txt +python3 .buildkite/scripts/e2e-pipeline/main.py \ No newline at end of file diff --git a/.buildkite/scripts/run_tests.sh b/.buildkite/scripts/run_tests.sh index 1108f653..b462caf4 100755 --- a/.buildkite/scripts/run_tests.sh +++ b/.buildkite/scripts/run_tests.sh @@ -1,15 +1 @@ -if [ -z "$ELASTICSEARCH_TREEISH" ]; then - source .buildkite/scripts/resolve_es_treeish.sh - echo "Resolved ELASTICSEARCH_TREEISH: ${ELASTICSEARCH_TREEISH}" -else - echo "Using ELASTICSEARCH_TREEISH ${ELASTICSEARCH_TREEISH} defined in the ENV." 
-fi - -if [ -z "$TARGET_BRANCH" ]; then - echo "Target branch is not specified, using default branch: main or BK defined" -else - echo "Changing the branch for ${TARGET_BRANCH}" - git checkout "$TARGET_BRANCH" -fi - mkdir -p .ci && curl -sL --retry 5 --retry-delay 5 https://github.com/logstash-plugins/.ci/archive/1.x.tar.gz | tar zxvf - --skip-old-files --strip-components=1 -C .ci --wildcards '*Dockerfile*' '*docker*' '*.sh' && .ci/docker-setup.sh && .ci/docker-run.sh \ No newline at end of file diff --git a/gradle.properties b/gradle.properties index 5c258231..04f816d3 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,2 +1,2 @@ LOGSTASH_PATH=../../logstash -ELASTICSEARCH_TREEISH=8.16 +ELASTICSEARCH_TREEISH=8.x diff --git a/src/main/java/co/elastic/logstash/filters/elasticintegration/IngestPipelineFactory.java b/src/main/java/co/elastic/logstash/filters/elasticintegration/IngestPipelineFactory.java index 43f4c3e8..ec952314 100644 --- a/src/main/java/co/elastic/logstash/filters/elasticintegration/IngestPipelineFactory.java +++ b/src/main/java/co/elastic/logstash/filters/elasticintegration/IngestPipelineFactory.java @@ -46,7 +46,7 @@ public IngestPipelineFactory withProcessors(final Map public Optional create(final PipelineConfiguration pipelineConfiguration) { try { - final Pipeline pipeline = Pipeline.create(pipelineConfiguration.getId(), pipelineConfiguration.getConfigAsMap(), processorFactories, scriptService); + final Pipeline pipeline = Pipeline.create(pipelineConfiguration.getId(), pipelineConfiguration.getConfig(false), processorFactories, scriptService); final IngestPipeline ingestPipeline = new IngestPipeline(pipelineConfiguration, pipeline); LOGGER.debug(() -> String.format("successfully created ingest pipeline `%s` from pipeline configuration", pipelineConfiguration.getId())); return Optional.of(ingestPipeline); diff --git a/src/test/java/co/elastic/logstash/filters/elasticintegration/ElasticsearchPipelineConfigurationResolverTest.java 
b/src/test/java/co/elastic/logstash/filters/elasticintegration/ElasticsearchPipelineConfigurationResolverTest.java index bce9687b..8434c14c 100644 --- a/src/test/java/co/elastic/logstash/filters/elasticintegration/ElasticsearchPipelineConfigurationResolverTest.java +++ b/src/test/java/co/elastic/logstash/filters/elasticintegration/ElasticsearchPipelineConfigurationResolverTest.java @@ -59,7 +59,7 @@ void testLoadConfigurationExists() throws Exception { assertThat(resolvedPipelineConfiguration, isPresent()); resolvedPipelineConfiguration.ifPresent(pipelineConfiguration -> { assertThat(pipelineConfiguration.getId(), is(equalTo("my-pipeline-id"))); - final Map configAsMap = pipelineConfiguration.getConfigAsMap(); + final Map configAsMap = pipelineConfiguration.getConfig(); assertThat(configAsMap, is(equalTo(EXPECTED_MY_PIPELINE_ID_CONFIG_MAP))); }); }); @@ -77,7 +77,7 @@ void testLoadConfigurationPipelineWithSpecialCharacters() throws Exception { assertThat(resolvedPipelineConfiguration, isPresent()); resolvedPipelineConfiguration.ifPresent(pipelineConfiguration -> { assertThat(pipelineConfiguration.getId(), is(equalTo("special char pipeline"))); - final Map configAsMap = pipelineConfiguration.getConfigAsMap(); + final Map configAsMap = pipelineConfiguration.getConfig(); assertThat(configAsMap, is(equalTo(EXPECTED_MY_PIPELINE_ID_CONFIG_MAP))); }); }); diff --git a/src/test/java/co/elastic/logstash/filters/elasticintegration/PipelineConfigurationFactoryTest.java b/src/test/java/co/elastic/logstash/filters/elasticintegration/PipelineConfigurationFactoryTest.java index 43c04d16..d60ddc07 100644 --- a/src/test/java/co/elastic/logstash/filters/elasticintegration/PipelineConfigurationFactoryTest.java +++ b/src/test/java/co/elastic/logstash/filters/elasticintegration/PipelineConfigurationFactoryTest.java @@ -44,7 +44,7 @@ public void testParseNamedObjectWithOnePipeline() throws Exception { final PipelineConfiguration loaded = 
PipelineConfigurationFactory.getInstance().parseNamedObject(json); assertThat(loaded, is(notNullValue())); assertThat(loaded.getId(), is(equalTo("pipeline-id-one"))); - assertThat(loaded.getConfigAsMap(), is(equalTo(EXPECTED_PIPELINE_ID_ONE_CONFIG_MAP))); + assertThat(loaded.getConfig(), is(equalTo(EXPECTED_PIPELINE_ID_ONE_CONFIG_MAP))); } @Test @@ -74,7 +74,7 @@ public void testParseNamedObjectsWithOnePipeline() throws Exception { assertThat(loaded.get(0), is(notNullValue())); assertThat(loaded.get(0).getId(), is(equalTo("pipeline-id-one"))); - assertThat(loaded.get(0).getConfigAsMap(), is(equalTo(EXPECTED_PIPELINE_ID_ONE_CONFIG_MAP))); + assertThat(loaded.get(0).getConfig(), is(equalTo(EXPECTED_PIPELINE_ID_ONE_CONFIG_MAP))); } @Test @@ -86,12 +86,12 @@ public void testParseNamedObjectWithTwoPipelines() throws Exception { assertThat(loaded.get(0), is(notNullValue())); assertThat(loaded.get(0).getId(), is(equalTo("pipeline-id-one"))); - assertThat(loaded.get(0).getConfigAsMap(), is(equalTo(EXPECTED_PIPELINE_ID_ONE_CONFIG_MAP))); + assertThat(loaded.get(0).getConfig(), is(equalTo(EXPECTED_PIPELINE_ID_ONE_CONFIG_MAP))); assertThat(loaded.get(1), is(notNullValue())); assertThat(loaded.get(1).getId(), is(equalTo("pipeline-id-two"))); - assertThat(loaded.get(1).getConfigAsMap(), is(equalTo(EXPECTED_PIPELINE_ID_TWO_CONFIG_MAP))); + assertThat(loaded.get(1).getConfig(), is(equalTo(EXPECTED_PIPELINE_ID_TWO_CONFIG_MAP))); } @Test @@ -109,7 +109,7 @@ public void testParseConfigOnly() throws Exception { final PipelineConfiguration loaded = PipelineConfigurationFactory.getInstance().parseConfigOnly("bananas" , json); assertThat(loaded, is(notNullValue())); assertThat(loaded.getId(), is(equalTo("bananas"))); - assertThat(loaded.getConfigAsMap(), is(equalTo(EXPECTED_PIPELINE_ID_ONE_CONFIG_MAP))); + assertThat(loaded.getConfig(), is(equalTo(EXPECTED_PIPELINE_ID_ONE_CONFIG_MAP))); } String elasticsearchApiFormattedJson(final String name) throws IOException { From 
364e99a42fef15b0b304012d69a1c2a334edb16f Mon Sep 17 00:00:00 2001 From: Mashhur Date: Thu, 12 Dec 2024 14:05:10 -0800 Subject: [PATCH 2/2] Set ES treeish to 8.17 --- gradle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gradle.properties b/gradle.properties index 04f816d3..2088516c 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,2 +1,2 @@ LOGSTASH_PATH=../../logstash -ELASTICSEARCH_TREEISH=8.x +ELASTICSEARCH_TREEISH=8.17