diff --git a/.buildkite/pull_request_pipeline.yml b/.buildkite/pull_request_pipeline.yml index a087813dfbf..ca18158cb78 100644 --- a/.buildkite/pull_request_pipeline.yml +++ b/.buildkite/pull_request_pipeline.yml @@ -19,6 +19,26 @@ steps: export GRADLE_OPTS="-Xmx2g -Dorg.gradle.daemon=false -Dorg.gradle.logging.level=info" ci/license_check.sh -m 4G + - label: ":passport_control: License check - Fedramp High Mode" + key: "license-check-fedramp-high" + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-4" + diskSizeGb: 64 + retry: + automatic: + - limit: 3 + command: | + set -euo pipefail + + source .buildkite/scripts/common/container-agent.sh + export JRUBY_OPTS="-J-Xmx1g" + export GRADLE_OPTS="-Xmx2g -Dorg.gradle.daemon=false -Dorg.gradle.logging.level=info" + docker build -t test-runner-image -f x-pack/distributions/internal/observabilitySRE/docker/Dockerfile . + docker run -e ORG_GRADLE_PROJECT_fedrampHighMode=true test-runner-image ci/license_check.sh -m 4G + - label: ":rspec: Ruby unit tests" key: "ruby-unit-tests" agents: @@ -39,6 +59,25 @@ steps: artifact_paths: - "coverage/coverage.json" + - label: ":rspec: Ruby unit tests - FIPS mode" + key: "ruby-unit-tests-fips" + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-4" + diskSizeGb: 64 + retry: + automatic: + - limit: 3 + command: | + set -euo pipefail + + docker build -t test-runner-image -f x-pack/distributions/internal/observabilitySRE/docker/Dockerfile . + docker run test-runner-image ./gradlew --info --stacktrace -PfedrampHighMode=true rubyTests + artifact_paths: + - "coverage/coverage.json" + - label: ":java: Java unit tests" key: "java-unit-tests" agents: @@ -60,6 +99,29 @@ steps: - "**/jacocoTestReport.xml" - "**/build/classes/**/*.*" + - label: ":java: Java unit tests - FIPS mode" + key: "java-unit-tests-fips" + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-4" + diskSizeGb: 64 + retry: + automatic: + - limit: 3 + env: + ENABLE_SONARQUBE: true + command: | + set -euo pipefail + + docker build -t test-runner-image -f x-pack/distributions/internal/observabilitySRE/docker/Dockerfile . 
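Every FIPS-mode step added to this pipeline follows the same build-once/run-in-container pattern, which can also be reproduced locally outside Buildkite. A minimal sketch, assuming Docker and a checkout at the repository root (the `test-runner-image` tag is just the pipeline's throwaway name):

    docker build -t test-runner-image -f x-pack/distributions/internal/observabilitySRE/docker/Dockerfile .
    # Gradle-driven suites take the project property directly...
    docker run test-runner-image ./gradlew -PfedrampHighMode=true rubyTests
    # ...Gradle invoked indirectly picks it up from the environment...
    docker run -e ORG_GRADLE_PROJECT_fedrampHighMode=true test-runner-image ci/license_check.sh -m 4G
    # ...and plain shell suites use FEDRAMP_HIGH_MODE, mapped onto the flag inside ci/*.sh:
    docker run -e FEDRAMP_HIGH_MODE=true test-runner-image x-pack/ci/unit_tests.sh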
+ docker run test-runner-image ./gradlew --info --stacktrace -PfedrampHighMode=true javaTests + artifact_paths: + - "**/build/test-results/javaTests/TEST-*.xml" + - "**/jacocoTestReport.xml" + - "**/build/classes/**/*.*" + - label: ":sonarqube: Continuous Code Inspection" if: | build.pull_request.id != null || @@ -81,6 +143,84 @@ steps: manual: allowed: true + - label: "Observability SRE container smoke test" + key: "observability-sre-container-smoke-test" + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-4" + diskSizeGb: 64 + retry: + automatic: + - limit: 3 + command: | + set -euo pipefail + source .buildkite/scripts/common/vm-agent.sh + QUALIFIED_VERSION="$(.buildkite/scripts/common/qualified-version.sh)" + # Build the image locally with the gradle task + ./gradlew --stacktrace artifactDockerObservabilitySRE -PfedrampHighMode=true + # Ensure it can at least start logstash + docker run docker.elastic.co/logstash/logstash-observability-sre:$${QUALIFIED_VERSION} \ + logstash -e 'input { generator { count => 3 } } output { stdout { codec => rubydebug } }' + # Run the smoke tests on the PR code + docker tag docker.elastic.co/logstash/logstash-observability-sre:$${QUALIFIED_VERSION} \ + pr-built-observability-sre-image + # observabilitySREsmokeTests orchestrates FIPS-mode docker images + # and validates assertions separately, so it does not need FIPS flag. + ./gradlew observabilitySREsmokeTests --stacktrace + + - label: ":lab_coat: Integration Tests - FIPS mode / part 1-of-3" + key: "integration-tests-fips-part-1-of-3" + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-4" + diskSizeGb: 64 + retry: + automatic: + - limit: 3 + command: | + set -euo pipefail + + docker build -t test-runner-image -f x-pack/distributions/internal/observabilitySRE/docker/Dockerfile . + docker run -e FEDRAMP_HIGH_MODE=true test-runner-image ci/integration_tests.sh split 0 3 + + - label: ":lab_coat: Integration Tests - FIPS mode / part 2-of-3" + key: "integration-tests-fips-part-2-of-3" + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-4" + diskSizeGb: 64 + retry: + automatic: + - limit: 3 + command: | + set -euo pipefail + + docker build -t test-runner-image -f x-pack/distributions/internal/observabilitySRE/docker/Dockerfile . + docker run -e FEDRAMP_HIGH_MODE=true test-runner-image ci/integration_tests.sh split 1 3 + + - label: ":lab_coat: Integration Tests - FIPS mode / part 3-of-3" + key: "integration-tests-fips-part-3-of-3" + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-4" + diskSizeGb: 64 + retry: + automatic: + - limit: 3 + command: | + set -euo pipefail + + docker build -t test-runner-image -f x-pack/distributions/internal/observabilitySRE/docker/Dockerfile . 
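The `split <index> <count>` arguments used by these FIPS integration steps fan the integration specs out across three workers; `ci/integration_tests.sh` delegates the actual bucketing to the `partition_files` helper. A rough stand-in for the idea only (the real helper may order and group the specs differently):

    cd qa/integration
    # round-robin the spec files into 3 buckets and print bucket 0
    find specs -name '*_spec.rb' | sort | awk -v i=0 -v n=3 '(NR - 1) % n == i'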
+ docker run -e FEDRAMP_HIGH_MODE=true test-runner-image ci/integration_tests.sh split 2 3 + - label: ":lab_coat: Integration Tests / part 1-of-3" key: "integration-tests-part-1-of-3" agents: @@ -228,6 +368,40 @@ steps: source .buildkite/scripts/common/container-agent.sh x-pack/ci/integration_tests.sh + - label: ":lab_coat: x-pack unit tests - FIPS mode" + key: "x-pack-unit-tests-fips" + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-4" + diskSizeGb: 64 + retry: + automatic: + - limit: 3 + command: | + set -euo pipefail + + docker build -t test-runner-image -f x-pack/distributions/internal/observabilitySRE/docker/Dockerfile . + docker run -e FEDRAMP_HIGH_MODE=true test-runner-image x-pack/ci/unit_tests.sh + + - label: ":lab_coat: x-pack integration - FIPS mode" + key: "integration-tests-x-pack-fips" + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-4" + diskSizeGb: 64 + retry: + automatic: + - limit: 3 + command: | + set -euo pipefail + + docker build -t test-runner-image -f x-pack/distributions/internal/observabilitySRE/docker/Dockerfile . + docker run -e FEDRAMP_HIGH_MODE=true test-runner-image x-pack/ci/integration_tests.sh + - wait: ~ continue_on_failure: true diff --git a/.buildkite/scripts/common/qualified-version.sh b/.buildkite/scripts/common/qualified-version.sh index 4d8e32adbf1..29e7a7b2f20 100755 --- a/.buildkite/scripts/common/qualified-version.sh +++ b/.buildkite/scripts/common/qualified-version.sh @@ -17,6 +17,11 @@ export QUALIFIED_VERSION="$( # e.g: 8.0.0-alpha1 printf '%s' "${VERSION_QUALIFIER:+-${VERSION_QUALIFIER}}" + # Include git SHA if requested + if [[ -n "${INCLUDE_COMMIT_ID:+x}" ]]; then + printf '%s' "-$(git rev-parse --short HEAD)" + fi + # add the SNAPSHOT tag unless WORKFLOW_TYPE=="staging" or RELEASE=="1" if [[ ! ( "${WORKFLOW_TYPE:-}" == "staging" || "${RELEASE:+$RELEASE}" == "1" ) ]]; then printf '%s' "-SNAPSHOT" diff --git a/.buildkite/scripts/dra/build-and-push-observability-sre.sh b/.buildkite/scripts/dra/build-and-push-observability-sre.sh new file mode 100755 index 00000000000..192c5d35de4 --- /dev/null +++ b/.buildkite/scripts/dra/build-and-push-observability-sre.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Script to build and publish ObservabilitySRE container +# Currently this is built on a host with the target architecture. +# This allows us to utilize the make file for building the container and +# to ensure the best compatability with the host architecture. +# A later step in CI will take care of pushing a tag that references the right +# image using `docker manifest` commands. + +echo "Setting up environment" +source .buildkite/scripts/common/vm-agent.sh +source .buildkite/scripts/dra/docker-env-setup.sh + +echo "Building ObservabilitySRE container" +./gradlew --stacktrace artifactDockerObservabilitySRE -PfedrampHighMode=true + +echo "Pushing ObservabilitySRE container to Docker repository" +docker_login + +# Get qualified version without SHA (this is what the gradle task will produce) +# Note that the gradle task always produces a version with -SNAPSHOT so if the +# workflow type is staging we need to append -SNAPSHOT to the version. 
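# To make the tag arithmetic concrete, a hypothetical example (version numbers
# invented for illustration, not taken from this branch):
#   qualified-version.sh                        -> 9.1.0-SNAPSHOT
#   INCLUDE_COMMIT_ID=1 qualified-version.sh    -> 9.1.0-abc1234-SNAPSHOT
#   per-architecture tag pushed by this script  -> logstash-observability-sre:9.1.0-abc1234-SNAPSHOT-x86_64
#   multi-arch manifest created afterwards      -> logstash-observability-sre:9.1.0-abc1234-SNAPSHOT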
+QUALIFIED_VERSION="$(.buildkite/scripts/common/qualified-version.sh)" +if [[ "${WORKFLOW_TYPE:-}" == "staging" && "${QUALIFIED_VERSION}" != *-SNAPSHOT ]]; then + QUALIFIED_VERSION="${QUALIFIED_VERSION}-SNAPSHOT" +fi + +# Set environment variable to include SHA and get version with SHA +QUALIFIED_VERSION_WITH_SHA="$(INCLUDE_COMMIT_ID=1 .buildkite/scripts/common/qualified-version.sh)" + +REGISTRY_PATH=docker.elastic.co/logstash/logstash-observability-sre + +# Current architecture +ARCH="${ARCH:-x86_64}" # Default to x86_64 if ARCH is not set +echo "Architecture: ${ARCH}" + +# Create the full tag with SHA and architecture +FULL_TAG="${QUALIFIED_VERSION_WITH_SHA}-${ARCH}" +echo "Tagging and pushing: ${REGISTRY_PATH}:${QUALIFIED_VERSION} as ${REGISTRY_PATH}:${FULL_TAG}" +docker tag ${REGISTRY_PATH}:${QUALIFIED_VERSION} ${REGISTRY_PATH}:${FULL_TAG} +docker push ${REGISTRY_PATH}:${FULL_TAG} + +# Teardown Docker environment +source .buildkite/scripts/dra/docker-env-teardown.sh \ No newline at end of file diff --git a/.buildkite/scripts/dra/generatesteps.py b/.buildkite/scripts/dra/generatesteps.py index 85891e1b9ac..012b48124f9 100644 --- a/.buildkite/scripts/dra/generatesteps.py +++ b/.buildkite/scripts/dra/generatesteps.py @@ -79,6 +79,61 @@ def package_aarch64_docker_step(branch, workflow_type): return step +def ship_observability_sre_image_steps(branch, workflow_type): + step = f''' +- label: ":package: Build & Ship aarch64 ObservabilitySRE container / {branch}-{workflow_type.upper()}" + key: "logstash_build_and_ship_observability_sre_aarch64" + soft_fail: true + depends_on: logstash_publish_dra + agents: + provider: aws + imagePrefix: platform-ingest-logstash-ubuntu-2204-aarch64 + instanceType: "m6g.4xlarge" + diskSizeGb: 200 + artifact_paths: + - "**/*.hprof" + command: | + export WORKFLOW_TYPE="{workflow_type}" + export PATH="/opt/buildkite-agent/.rbenv/bin:/opt/buildkite-agent/.pyenv/bin:$PATH" + export ARCH="aarch64" + eval "$(rbenv init -)" + .buildkite/scripts/dra/build-and-push-observability-sre.sh +- label: ":package: Build & Ship x86_64 ObservabilitySRE container / {branch}-{workflow_type.upper()}" + key: "logstash_build_and_ship_observability_sre_x86_64" + soft_fail: true + depends_on: logstash_publish_dra + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-16" + diskSizeGb: 200 + artifact_paths: + - "**/*.hprof" + command: | + export WORKFLOW_TYPE="{workflow_type}" + export PATH="/opt/buildkite-agent/.rbenv/bin:/opt/buildkite-agent/.pyenv/bin:$PATH" + export ARCH="x86_64" + eval "$(rbenv init -)" + .buildkite/scripts/dra/build-and-push-observability-sre.sh +- label: ":docker: Create & Push ObservabilitySRE Multi-Arch Manifest / {branch}-{workflow_type.upper()}" + key: "logstash_create_observability_sre_manifest" + depends_on: + - "logstash_build_and_ship_observability_sre_aarch64" + - "logstash_build_and_ship_observability_sre_x86_64" + agents: + provider: gcp + imageProject: elastic-images-prod + image: family/platform-ingest-logstash-ubuntu-2204 + machineType: "n2-standard-8" + command: | + export WORKFLOW_TYPE="{workflow_type}" + export PATH="/opt/buildkite-agent/.rbenv/bin:/opt/buildkite-agent/.pyenv/bin:$PATH" + eval "$(rbenv init -)" + .buildkite/scripts/dra/multi-architecture-observability-sre.sh +''' + return step + def publish_dra_step(branch, workflow_type, depends_on): step = f''' - label: ":elastic-stack: Publish / {branch}-{workflow_type.upper()} DRA artifacts" @@ -139,9 +194,14 
@@ def build_steps_to_yaml(branch, workflow_type): "steps": build_steps_to_yaml(branch, workflow_type), }) - # Final step: pull artifacts built above and publish them via the release-manager + # Pull artifacts built above and publish them via the release-manager structure["steps"].extend( yaml.safe_load(publish_dra_step(branch, workflow_type, depends_on=group_key)), ) + # Once published, do the same for observabilitySRE image + structure["steps"].extend( + yaml.safe_load(ship_observability_sre_image_steps(branch, workflow_type)), + ) + print(YAML_HEADER + yaml.dump(structure, Dumper=yaml.Dumper, sort_keys=False)) diff --git a/.buildkite/scripts/dra/multi-architecture-observability-sre.sh b/.buildkite/scripts/dra/multi-architecture-observability-sre.sh new file mode 100755 index 00000000000..eb85faff40c --- /dev/null +++ b/.buildkite/scripts/dra/multi-architecture-observability-sre.sh @@ -0,0 +1,53 @@ +#!/bin/bash +# Script to create and push Docker manifest for multi-architecture support +# This MUST be run after build-and-push-observabilty-sre.sh! + +source .buildkite/scripts/common/vm-agent.sh +source .buildkite/scripts/dra/docker-env-setup.sh + +docker_login + +# Set INCLUDE_COMMIT_ID to include git SHA in version +QUALIFIED_VERSION="$(INCLUDE_COMMIT_ID=1 .buildkite/scripts/common/qualified-version.sh)" +REGISTRY_PATH=docker.elastic.co/logstash/logstash-observability-sre + +# Architecture-specific tags (created by the build steps) +X86_64_TAG="${QUALIFIED_VERSION}-x86_64" +AARCH64_TAG="${QUALIFIED_VERSION}-aarch64" + +# Target manifest tags - already has SHA from QUALIFIED_VERSION +VERSION_MANIFEST_TAG="${QUALIFIED_VERSION}" + +# Create and push manifest with version (which already includes SHA) +echo "Creating manifest list for: ${REGISTRY_PATH}:${VERSION_MANIFEST_TAG}" +docker manifest create ${REGISTRY_PATH}:${VERSION_MANIFEST_TAG} \ + ${REGISTRY_PATH}:${X86_64_TAG} \ + ${REGISTRY_PATH}:${AARCH64_TAG} + +docker manifest annotate ${REGISTRY_PATH}:${VERSION_MANIFEST_TAG} \ + ${REGISTRY_PATH}:${X86_64_TAG} --os linux --arch amd64 + +docker manifest annotate ${REGISTRY_PATH}:${VERSION_MANIFEST_TAG} \ + ${REGISTRY_PATH}:${AARCH64_TAG} --os linux --arch arm64 + +echo "Pushing manifest: ${REGISTRY_PATH}:${VERSION_MANIFEST_TAG}" +docker manifest push ${REGISTRY_PATH}:${VERSION_MANIFEST_TAG} + +# Also create version without SHA for effective "latest" tag +BASE_VERSION="$(.buildkite/scripts/common/qualified-version.sh)" +echo "Creating manifest list for: ${REGISTRY_PATH}:${BASE_VERSION}" +docker manifest create ${REGISTRY_PATH}:${BASE_VERSION} \ + ${REGISTRY_PATH}:${X86_64_TAG} \ + ${REGISTRY_PATH}:${AARCH64_TAG} + +docker manifest annotate ${REGISTRY_PATH}:${BASE_VERSION} \ + ${REGISTRY_PATH}:${X86_64_TAG} --os linux --arch amd64 + +docker manifest annotate ${REGISTRY_PATH}:${BASE_VERSION} \ + ${REGISTRY_PATH}:${AARCH64_TAG} --os linux --arch arm64 + +echo "Pushing manifest: ${REGISTRY_PATH}:${BASE_VERSION}" +docker manifest push ${REGISTRY_PATH}:${BASE_VERSION} + +# Teardown Docker environment +source .buildkite/scripts/dra/docker-env-teardown.sh \ No newline at end of file diff --git a/.buildkite/scripts/exhaustive-tests/generate-steps.py b/.buildkite/scripts/exhaustive-tests/generate-steps.py index c042985b1b0..c84e1cdddbf 100644 --- a/.buildkite/scripts/exhaustive-tests/generate-steps.py +++ b/.buildkite/scripts/exhaustive-tests/generate-steps.py @@ -168,6 +168,26 @@ def acceptance_docker_steps()-> list[typing.Any]: return steps +def fips_test_runner_step() -> dict[str, typing.Any]: + 
step = { + "label": "Observability SRE Acceptance Tests", + "key": "observabilitySRE-acceptance-tests", + "agents": { + "provider": "aws", + "instanceType": "m6i.xlarge", + "diskSizeGb": 60, + "instanceMaxAge": 1440, + "imagePrefix": "platform-ingest-logstash-ubuntu-2204-fips" + }, + "retry": {"automatic": [{"limit": 1}]}, + "command": LiteralScalarString("""#!/usr/bin/env bash +set -euo pipefail +source .buildkite/scripts/common/vm-agent.sh +./gradlew observabilitySREacceptanceTests --stacktrace +"""), + } + return step + if __name__ == "__main__": LINUX_OS_ENV_VAR_OVERRIDE = os.getenv("LINUX_OS") WINDOWS_OS_ENV_VAR_OVERRIDE = os.getenv("WINDOWS_OS") @@ -215,5 +235,12 @@ def acceptance_docker_steps()-> list[typing.Any]: "steps": acceptance_docker_steps(), }) + structure["steps"].append({ + "group": "Observability SRE Acceptance Tests", + "key": "acceptance-observability-sre", + "depends_on": ["testing-phase"], + "steps": [fips_test_runner_step()], + }) + print('# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json') YAML().dump(structure, sys.stdout) diff --git a/build.gradle b/build.gradle index 87d139fb4d8..99246fbbb4e 100644 --- a/build.gradle +++ b/build.gradle @@ -39,6 +39,8 @@ plugins { id "com.dorongold.task-tree" version "2.1.0" } +apply from: "${projectDir}/x-pack/distributions/internal/observabilitySRE/build-ext.gradle" + apply plugin: 'de.undercouch.download' apply from: "rubyUtils.gradle" @@ -322,6 +324,34 @@ tasks.register("compileGrammar") { } } +tasks.register("artifactDockerObservabilitySRE") { + dependsOn bootstrap + inputs.files fileTree("${projectDir}/rakelib") + inputs.files fileTree("${projectDir}/bin") + inputs.files fileTree("${projectDir}/config") + inputs.files fileTree("${projectDir}/lib") + inputs.files fileTree("${projectDir}/logstash-core-plugin-api") + inputs.files fileTree("${projectDir}/logstash-core/lib") + inputs.files fileTree("${projectDir}/logstash-core/src") + inputs.files fileTree("${projectDir}/x-pack") + outputs.files fileTree("${buildDir}") { + include "Dockerfile-observability-sre" + include "logstash-observability-sre-${project.version}-SNAPSHOT-linux-*.tar.gz" + include "logstash-observability-sre-${project.version}-SNAPSHOT-docker-build-context.tar.gz" + include "plugin_aliases_hashed.yml" + include "jdk-*-linux-*.tar.gz" + } + doFirst { + if (!fedrampHighMode) { + logger.error("NOT in Fedramp High mode. 
Aborting.") + throw new GradleException("cannot build docker artifact for observabilitySRE without `-PfedrampHighMode=true`") + } + } + doLast { + rake(projectDir, buildDir, 'artifact:docker_observabilitySRE') + } +} + tasks.register("assembleTarDistribution") { dependsOn bootstrap inputs.files fileTree("${projectDir}/rakelib") diff --git a/ci/integration_tests.sh b/ci/integration_tests.sh index 318660bc951..363172ec3b8 100755 --- a/ci/integration_tests.sh +++ b/ci/integration_tests.sh @@ -10,6 +10,9 @@ export GRADLE_OPTS="-Xmx2g -Dorg.gradle.jvmargs=-Xmx2g -Dorg.gradle.daemon=false export SPEC_OPTS="--order rand --format documentation" export CI=true +# Option for running in fedramp high mode +FEDRAMP_FLAG="${FEDRAMP_HIGH_MODE/#/-PfedrampHighMode=}" + if [ -n "$BUILD_JAVA_HOME" ]; then GRADLE_OPTS="$GRADLE_OPTS -Dorg.gradle.java.home=$BUILD_JAVA_HOME" fi @@ -27,13 +30,13 @@ elif [[ $1 == "split" ]]; then specs=($(cd qa/integration; partition_files "${index}" "${count}" < <(find specs -name '*_spec.rb') )) echo "Running integration tests partition[${index}] of ${count}: ${specs[*]}" - ./gradlew runIntegrationTests -PrubyIntegrationSpecs="${specs[*]}" --console=plain + ./gradlew runIntegrationTests $FEDRAMP_FLAG -PrubyIntegrationSpecs="${specs[*]}" --console=plain elif [[ ! -z $@ ]]; then echo "Running integration tests 'rspec $@'" - ./gradlew runIntegrationTests -PrubyIntegrationSpecs="$@" --console=plain + ./gradlew runIntegrationTests $FEDRAMP_FLAG -PrubyIntegrationSpecs="$@" --console=plain else echo "Running all integration tests" - ./gradlew runIntegrationTests --console=plain + ./gradlew runIntegrationTests $FEDRAMP_FLAG --console=plain fi diff --git a/docker/Makefile b/docker/Makefile index 5fd218b67e9..cdc9915b6bf 100644 --- a/docker/Makefile +++ b/docker/Makefile @@ -20,7 +20,7 @@ else endif endif -IMAGE_FLAVORS ?= oss full wolfi +IMAGE_FLAVORS ?= oss full wolfi observability-sre DEFAULT_IMAGE_FLAVOR ?= full IMAGE_TAG := $(ELASTIC_REGISTRY)/logstash/logstash @@ -58,6 +58,15 @@ build-from-local-wolfi-artifacts: dockerfile (docker kill $(HTTPD); false); -docker kill $(HTTPD) +build-from-local-observability-sre-artifacts: dockerfile + docker run --rm -d --name=$(HTTPD) \ + -p 8000:8000 --expose=8000 -v $(ARTIFACTS_DIR):/mnt \ + python:3 bash -c 'cd /mnt && python3 -m http.server' + timeout 120 bash -c 'until curl -s localhost:8000 > /dev/null; do sleep 1; done' + docker build --progress=plain --network=host -t $(IMAGE_TAG)-observability-sre:$(VERSION_TAG) -f $(ARTIFACTS_DIR)/Dockerfile-observability-sre data/logstash || \ + (docker kill $(HTTPD); false); + -docker kill $(HTTPD) + COPY_FILES := $(ARTIFACTS_DIR)/docker/config/pipelines.yml $(ARTIFACTS_DIR)/docker/config/logstash-oss.yml $(ARTIFACTS_DIR)/docker/config/logstash-full.yml COPY_FILES += $(ARTIFACTS_DIR)/docker/config/log4j2.file.properties $(ARTIFACTS_DIR)/docker/config/log4j2.properties COPY_FILES += $(ARTIFACTS_DIR)/docker/env2yaml/env2yaml.go $(ARTIFACTS_DIR)/docker/env2yaml/go.mod $(ARTIFACTS_DIR)/docker/env2yaml/go.sum @@ -113,7 +122,7 @@ ironbank_docker_paths: mkdir -p $(ARTIFACTS_DIR)/ironbank/scripts/go/src/env2yaml/vendor mkdir -p $(ARTIFACTS_DIR)/ironbank/scripts/pipeline -public-dockerfiles: public-dockerfiles_oss public-dockerfiles_full public-dockerfiles_wolfi public-dockerfiles_ironbank +public-dockerfiles: public-dockerfiles_oss public-dockerfiles_full public-dockerfiles_wolfi public-dockerfiles_observability-sre public-dockerfiles_ironbank public-dockerfiles_full: templates/Dockerfile.erb docker_paths 
$(COPY_FILES) ../vendor/jruby/bin/jruby -S erb -T "-"\ @@ -121,7 +130,7 @@ public-dockerfiles_full: templates/Dockerfile.erb docker_paths $(COPY_FILES) elastic_version="${ELASTIC_VERSION}" \ arch="${ARCHITECTURE}" \ version_tag="${VERSION_TAG}" \ - release="${RELEASE}" \ + release="${RELEASE}" \ image_flavor="full" \ local_artifacts="false" \ templates/Dockerfile.erb > "${ARTIFACTS_DIR}/Dockerfile-full" && \ @@ -142,7 +151,7 @@ public-dockerfiles_oss: templates/Dockerfile.erb docker_paths $(COPY_FILES) elastic_version="${ELASTIC_VERSION}" \ arch="${ARCHITECTURE}" \ version_tag="${VERSION_TAG}" \ - release="${RELEASE}" \ + release="${RELEASE}" \ image_flavor="oss" \ local_artifacts="false" \ templates/Dockerfile.erb > "${ARTIFACTS_DIR}/Dockerfile-oss" && \ @@ -163,7 +172,7 @@ public-dockerfiles_wolfi: templates/Dockerfile.erb docker_paths $(COPY_FILES) elastic_version="${ELASTIC_VERSION}" \ arch="${ARCHITECTURE}" \ version_tag="${VERSION_TAG}" \ - release="${RELEASE}" \ + release="${RELEASE}" \ image_flavor="wolfi" \ local_artifacts="false" \ templates/Dockerfile.erb > "${ARTIFACTS_DIR}/Dockerfile-wolfi" && \ @@ -178,6 +187,27 @@ build-from-dockerfiles_wolfi: public-dockerfiles_wolfi sed 's/artifacts/snapshots/g' Dockerfile > Dockerfile.tmp && mv Dockerfile.tmp Dockerfile && \ docker build --progress=plain --network=host -t $(IMAGE_TAG)-dockerfile-wolfi:$(VERSION_TAG) . +public-dockerfiles_observability-sre: templates/Dockerfile.erb docker_paths $(COPY_FILES) + ../vendor/jruby/bin/jruby -S erb -T "-"\ + created_date="${BUILD_DATE}" \ + elastic_version="${ELASTIC_VERSION}" \ + arch="${ARCHITECTURE}" \ + version_tag="${VERSION_TAG}" \ + release="${RELEASE}" \ + image_flavor="observability-sre" \ + local_artifacts="false" \ + templates/Dockerfile.erb > "${ARTIFACTS_DIR}/Dockerfile-observability-sre" && \ + cd $(ARTIFACTS_DIR)/docker && \ + cp $(ARTIFACTS_DIR)/Dockerfile-observability-sre Dockerfile && \ + tar -zcf ../logstash-observability-sre-$(VERSION_TAG)-docker-build-context.tar.gz Dockerfile bin config env2yaml pipeline + +build-from-dockerfiles_observability-sre: public-dockerfiles_observability-sre + cd $(ARTIFACTS_DIR)/docker && \ + mkdir -p dockerfile_build_observability-sre && cd dockerfile_build_observability-sre && \ + tar -zxf ../../logstash-observability-sre-$(VERSION_TAG)-docker-build-context.tar.gz && \ + sed 's/artifacts/snapshots/g' Dockerfile > Dockerfile.tmp && mv Dockerfile.tmp Dockerfile && \ + docker build --progress=plain --network=host -t $(IMAGE_TAG)-dockerfile-observability-sre:$(VERSION_TAG) . 
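With observability-sre added to IMAGE_FLAVORS, the existing Makefile entry points apply to the new flavor as well. A plausible local workflow, assuming the tarball and rendered Dockerfile have already been staged under the artifacts directory by the gradle/rake tasks:

    make public-dockerfiles_observability-sre           # render Dockerfile-observability-sre and its build-context tarball
    make build-from-local-observability-sre-artifacts   # build the image from locally staged artifacts served over a throwaway HTTP server
    make build-from-dockerfiles_observability-sre       # rebuild from the rendered Dockerfile against published snapshots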
+ public-dockerfiles_ironbank: templates/hardening_manifest.yaml.erb templates/IronbankDockerfile.erb ironbank_docker_paths $(COPY_IRONBANK_FILES) ../vendor/jruby/bin/jruby -S erb -T "-"\ elastic_version="${ELASTIC_VERSION}" \ @@ -187,7 +217,7 @@ public-dockerfiles_ironbank: templates/hardening_manifest.yaml.erb templates/Iro elastic_version="${ELASTIC_VERSION}" \ arch="${ARCHITECTURE}" \ version_tag="${VERSION_TAG}" \ - release="${RELEASE}" \ + release="${RELEASE}" \ image_flavor="ironbank" \ local_artifacts="false" \ templates/IronbankDockerfile.erb > "${ARTIFACTS_DIR}/Dockerfile-ironbank" && \ diff --git a/docker/templates/Dockerfile.erb b/docker/templates/Dockerfile.erb index fbf6ee5b78c..a95094bbe06 100644 --- a/docker/templates/Dockerfile.erb +++ b/docker/templates/Dockerfile.erb @@ -1,5 +1,5 @@ # This Dockerfile was generated from templates/Dockerfile.erb -<%# image_flavor 'full', oss', 'wolfi' -%> +<%# image_flavor 'full', oss', 'wolfi', 'observability-sre' -%> <% if local_artifacts == 'false' -%> <% url_root = 'https://artifacts.elastic.co/downloads/logstash' -%> <% else -%> @@ -11,6 +11,9 @@ <% elsif image_flavor == 'full' %> <% tarball = "logstash-#{elastic_version}-linux-${arch}.tar.gz" -%> <% license = 'Elastic License' -%> +<% elsif image_flavor == 'observability-sre' -%><%# 'observability-sre' needs arch to be injected from the outside -%> + <% tarball = "logstash-observability-sre-#{elastic_version}-linux-#{arch}.tar.gz" -%> + <% license = 'Elastic License' -%> <% else -%><%# 'wolfi' needs arch to be injected from the outside -%> <% tarball = "logstash-#{elastic_version}-linux-#{arch}.tar.gz" -%> <% license = 'Elastic License' -%> @@ -19,6 +22,10 @@ <% base_image = 'redhat/ubi9-minimal:latest' -%> <% go_image = 'golang:1.23' -%> <% package_manager = 'microdnf' -%> +<% elsif image_flavor == 'observability-sre' -%> + <% base_image = 'docker.elastic.co/wolfi/chainguard-base-fips' -%> + <% go_image = 'docker.elastic.co/wolfi/go:1.23' -%> + <% package_manager = 'apk' -%> <% else -%> <% base_image = 'docker.elastic.co/wolfi/chainguard-base' -%> <% go_image = 'docker.elastic.co/wolfi/go:1.23' -%> @@ -52,7 +59,7 @@ RUN \ <%= package_manager %> install -y openssl && \ <%= package_manager %> install -y which shadow-utils && \ <%= package_manager %> clean all -<% else -%><%# 'wolfi' -%> +<% else -%><%# 'wolfi', 'observability-sre' -%> <%= package_manager %> add --no-cache curl bash openssl <% end -%> @@ -64,7 +71,7 @@ RUN groupadd --gid 1000 logstash && \ --home "/usr/share/logstash" \ --no-create-home \ logstash && \ -<% else -%><%# 'wolfi' -%> +<% else -%><%# 'wolfi', 'observability-sre' -%> RUN addgroup -g 1000 logstash && \ adduser -u 1000 -G logstash \ --disabled-password \ @@ -77,7 +84,7 @@ RUN addgroup -g 1000 logstash && \ <% if image_flavor == 'full' || image_flavor == 'oss' -%> arch="$(rpm --query --queryformat='%{ARCH}' rpm)" && \ <% end -%> - curl -f -Lo logstash.tar.gz <%= url_root %>/<%= tarball %> && \ + curl --fail --location --output logstash.tar.gz <%= url_root %>/<%= tarball %> && \ tar -zxf logstash.tar.gz -C /usr/share && \ rm logstash.tar.gz && \ mv /usr/share/logstash-<%= elastic_version %> /usr/share/logstash && \ @@ -93,12 +100,64 @@ COPY --from=builder-env2yaml /tmp/go/src/env2yaml/env2yaml /usr/local/bin/env2ya COPY --chown=logstash:root config/pipelines.yml config/log4j2.properties config/log4j2.file.properties /usr/share/logstash/config/ <% if image_flavor == 'oss' -%> COPY --chown=logstash:root config/logstash-oss.yml 
/usr/share/logstash/config/logstash.yml -<% else -%><%# 'full', 'wolfi' -%> +<% else -%><%# 'full', 'wolfi', 'observability-sre' -%> COPY --chown=logstash:root config/logstash-full.yml /usr/share/logstash/config/logstash.yml <% end -%> COPY --chown=logstash:root pipeline/default.conf /usr/share/logstash/pipeline/logstash.conf COPY --chmod=0755 bin/docker-entrypoint /usr/local/bin/ +<% if image_flavor == 'observability-sre' -%> +# Add FIPS configuration for observability-sre image flavor +RUN mkdir -p /usr/share/logstash/config/security + +# Copy JVM security configuration files from the unpacked tarball +RUN cp /usr/share/logstash/x-pack/distributions/internal/observabilitySRE/config/security/java.security /usr/share/logstash/config/security/ && \ + cp /usr/share/logstash/x-pack/distributions/internal/observabilitySRE/config/security/java.policy /usr/share/logstash/config/security/ && \ + chown --recursive logstash:root /usr/share/logstash/config/security/ + +# list the classes provided by the fips BC +RUN find /usr/share/logstash -name *.jar | grep lib + +# Convert JKS to BCFKS for truststore and keystore +RUN /usr/share/logstash/jdk/bin/keytool -importkeystore \ + -srckeystore /usr/share/logstash/jdk/lib/security/cacerts \ + -destkeystore /usr/share/logstash/config/security/cacerts.bcfks \ + -srcstoretype jks \ + -deststoretype bcfks \ + -providerpath /usr/share/logstash/logstash-core/lib/jars/bc-fips-2.0.0.jar \ + -provider org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider \ + -deststorepass changeit \ + -srcstorepass changeit \ + -noprompt + +RUN /usr/share/logstash/jdk/bin/keytool -importkeystore \ + -srckeystore /usr/share/logstash/jdk/lib/security/cacerts \ + -destkeystore /usr/share/logstash/config/security/keystore.bcfks \ + -srcstoretype jks \ + -deststoretype bcfks \ + -providerpath /usr/share/logstash/logstash-core/lib/jars/bc-fips-2.0.0.jar \ + -provider org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider \ + -deststorepass changeit \ + -srcstorepass changeit \ + -noprompt + +# Set Java security properties through LS_JAVA_OPTS +ENV LS_JAVA_OPTS="\ + -Djava.security.properties=/usr/share/logstash/config/security/java.security \ + -Djava.security.policy=/usr/share/logstash/config/security/java.policy \ + -Djavax.net.ssl.keyStore=/usr/share/logstash/config/security/keystore.bcfks \ + -Djavax.net.ssl.keyStoreType=BCFKS \ + -Djavax.net.ssl.keyStoreProvider=BCFIPS \ + -Djavax.net.ssl.keyStorePassword=changeit \ + -Djavax.net.ssl.trustStore=/usr/share/logstash/config/security/cacerts.bcfks \ + -Djavax.net.ssl.trustStoreType=BCFKS \ + -Djavax.net.ssl.trustStoreProvider=BCFIPS \ + -Djavax.net.ssl.trustStorePassword=changeit \ + -Dssl.KeyManagerFactory.algorithm=PKIX \ + -Dssl.TrustManagerFactory.algorithm=PKIX \ + -Dorg.bouncycastle.fips.approved_only=true" +<% end -%> + WORKDIR /usr/share/logstash USER 1000 diff --git a/logstash-core/spec/logstash/patches_spec.rb b/logstash-core/spec/logstash/patches_spec.rb index 2014300d295..81f2d038a83 100644 --- a/logstash-core/spec/logstash/patches_spec.rb +++ b/logstash-core/spec/logstash/patches_spec.rb @@ -20,7 +20,7 @@ require "flores/pki" require "logstash/json" -describe "OpenSSL defaults" do +describe "OpenSSL defaults", :skip_fips do subject { OpenSSL::SSL::SSLContext.new } # OpenSSL::SSL::SSLContext#ciphers returns an array of diff --git a/logstash-core/spec/logstash/persisted_queue_config_validator_spec.rb b/logstash-core/spec/logstash/persisted_queue_config_validator_spec.rb index 98a3110153d..5b769ef8fbc 100644 --- 
a/logstash-core/spec/logstash/persisted_queue_config_validator_spec.rb +++ b/logstash-core/spec/logstash/persisted_queue_config_validator_spec.rb @@ -63,7 +63,10 @@ before do # create a 2MB file ::File.open(page_file, 'wb') do |f| - f.write(SecureRandom.random_bytes(2**21)) + # Work around FIPS mode limitations in requesting large amounts of random data + # We need 64 chunks of 32KB to create a 2MB file + # See https://github.com/elastic/ingest-dev/issues/5072 + 64.times { f.write(SecureRandom.random_bytes(2**15)) } end end diff --git a/qa/integration/fixtures/plugins/generate-gems.sh b/qa/integration/fixtures/plugins/generate-gems.sh index 2871367b2c3..43848234863 100755 --- a/qa/integration/fixtures/plugins/generate-gems.sh +++ b/qa/integration/fixtures/plugins/generate-gems.sh @@ -1,4 +1,8 @@ #!/usr/bin/env sh +# Add jruby bin directory to the PATH after existing entries for gem executable +SCRIPT_DIR="$( dirname "$0" )" +PATH="$PATH:$SCRIPT_DIR/../../../../vendor/jruby/bin" +export PATH -cd "$( dirname "$0" )" +cd "$SCRIPT_DIR" find . -name '*.gemspec' | xargs -n1 gem build \ No newline at end of file diff --git a/qa/integration/rspec.rb b/qa/integration/rspec.rb index 247ef9b4cf1..838286ed820 100644 --- a/qa/integration/rspec.rb +++ b/qa/integration/rspec.rb @@ -33,4 +33,8 @@ RSpec.clear_examples +RSpec.configure do |c| + c.filter_run_excluding skip_fips: true if java.lang.System.getProperty("org.bouncycastle.fips.approved_only") == "true" +end + return RSpec::Core::Runner.run($JUNIT_ARGV).to_i diff --git a/qa/integration/specs/cli/install_spec.rb b/qa/integration/specs/cli/install_spec.rb index 0463fc57d5f..c058418f9f2 100644 --- a/qa/integration/specs/cli/install_spec.rb +++ b/qa/integration/specs/cli/install_spec.rb @@ -105,7 +105,7 @@ def plugin_filename_re(name, version) end end - context "pack" do + context "pack", :skip_fips do context "when the command is run in the `$LOGSTASH_HOME`" do include_examples "install from a pack" end @@ -128,7 +128,7 @@ def plugin_filename_re(name, version) end end - context "install non bundle plugin" do + context "install non bundle plugin", :skip_fips do let(:plugin_name) { "logstash-input-github" } let(:install_command) { "bin/logstash-plugin install" } @@ -163,7 +163,7 @@ def plugin_filename_re(name, version) end end - context "rubygems hosted plugin" do + context "rubygems hosted plugin", :skip_fips do include_context "pluginmanager validation helpers" shared_context("install over existing") do before(:each) do diff --git a/qa/integration/specs/cli/prepare_offline_pack_spec.rb b/qa/integration/specs/cli/prepare_offline_pack_spec.rb index 22e5b423264..cf405925fcd 100644 --- a/qa/integration/specs/cli/prepare_offline_pack_spec.rb +++ b/qa/integration/specs/cli/prepare_offline_pack_spec.rb @@ -20,7 +20,7 @@ require_relative "../../services/logstash_service" require_relative "../../framework/helpers" -describe "CLI > logstash-plugin prepare-offline-pack" do +describe "CLI > logstash-plugin prepare-offline-pack", :skip_fips do before(:all) do @fixture = Fixture.new(__FILE__) @logstash_plugin = @fixture.get_service("logstash").plugin_cli diff --git a/qa/integration/specs/cli/remove_spec.rb b/qa/integration/specs/cli/remove_spec.rb index bf168aafc4f..9e5e17085ed 100644 --- a/qa/integration/specs/cli/remove_spec.rb +++ b/qa/integration/specs/cli/remove_spec.rb @@ -22,7 +22,7 @@ require_relative "pluginmanager_spec_helper" require "logstash/devutils/rspec/spec_helper" -describe "CLI > logstash-plugin remove" do +describe "CLI > logstash-plugin 
remove", :skip_fips do include_context "pluginmanager validation helpers" diff --git a/qa/integration/specs/cli/update_spec.rb b/qa/integration/specs/cli/update_spec.rb index 56ad75ec791..ee0adfe6771 100644 --- a/qa/integration/specs/cli/update_spec.rb +++ b/qa/integration/specs/cli/update_spec.rb @@ -22,7 +22,7 @@ require_relative "pluginmanager_spec_helper" require "logstash/devutils/rspec/spec_helper" -describe "CLI > logstash-plugin update" do +describe "CLI > logstash-plugin update", :skip_fips do include_context "pluginmanager validation helpers" @@ -62,4 +62,4 @@ expect("logstash-filter-qatest-0.1.0").to_not be_installed_gem end end -end \ No newline at end of file +end diff --git a/qa/integration/specs/install_java_plugin_spec.rb b/qa/integration/specs/install_java_plugin_spec.rb index 1447280931b..179ff1de49d 100644 --- a/qa/integration/specs/install_java_plugin_spec.rb +++ b/qa/integration/specs/install_java_plugin_spec.rb @@ -21,7 +21,7 @@ require "logstash/devutils/rspec/spec_helper" require "stud/temporary" -describe "Install and run java plugin" do +describe "Install and run java plugin", :skip_fips do before(:all) do @fixture = Fixture.new(__FILE__) @logstash = @fixture.get_service("logstash") diff --git a/qa/integration/specs/webserver_spec.rb b/qa/integration/specs/webserver_spec.rb index d3440d28961..0ee3e29f3c1 100644 --- a/qa/integration/specs/webserver_spec.rb +++ b/qa/integration/specs/webserver_spec.rb @@ -22,7 +22,7 @@ require "stud/try" require "manticore" -describe 'api webserver' do +describe 'api webserver', :skip_fips do let!(:logger) { double("Logger").as_null_object } let!(:agent) { double("Agent").as_null_object } subject(:webserver) { LogStash::WebServer.new(logger, agent, webserver_options) } diff --git a/rakelib/artifacts.rake b/rakelib/artifacts.rake index 0e40d376472..1dda55bc956 100644 --- a/rakelib/artifacts.rake +++ b/rakelib/artifacts.rake @@ -107,7 +107,7 @@ namespace "artifact" do @exclude_paths << 'vendor/jruby/lib/ruby/gems/shared/specifications/net-imap-0.2.3.gemspec' @exclude_paths << 'vendor/jruby/lib/ruby/gems/shared/gems/net-imap-0.2.3/**/*' - @exclude_paths + @exclude_paths.freeze end def oss_exclude_paths @@ -163,7 +163,7 @@ namespace "artifact" do desc "Generate rpm, deb, tar and zip artifacts" task "all" => ["prepare", "build"] - task "docker_only" => ["prepare", "build_docker_full", "build_docker_oss", "build_docker_wolfi"] + task "docker_only" => ["prepare", "build_docker_full", "build_docker_oss", "build_docker_wolfi", "build_docker_observabilitySRE"] desc "Build all (jdk bundled and not) tar.gz and zip of default logstash plugins with all dependencies" task "archives" => ["prepare", "generate_build_metadata"] do @@ -189,25 +189,25 @@ namespace "artifact" do safe_system("./gradlew bootstrap") # force the build of Logstash jars end - def create_archive_pack(license_details, arch, *oses) + def create_archive_pack(license_details, arch, *oses, &tar_interceptor) oses.each do |os_name| puts("[artifact:archives] Building tar.gz/zip of default plugins for OS: #{os_name}, arch: #{arch}") - create_single_archive_pack(os_name, arch, license_details) + create_single_archive_pack(os_name, arch, license_details, &tar_interceptor) end end - def create_single_archive_pack(os_name, arch, license_details) + def create_single_archive_pack(os_name, arch, license_details, &tar_interceptor) safe_system("./gradlew copyJdk -Pjdk_bundle_os=#{os_name} -Pjdk_arch=#{arch}") if arch == 'arm64' arch = 'aarch64' end case os_name when "linux" - 
build_tar(*license_details, platform: "-linux-#{arch}") + build_tar(*license_details, platform: "-linux-#{arch}", &tar_interceptor) when "windows" build_zip(*license_details, platform: "-windows-#{arch}") when "darwin" - build_tar(*license_details, platform: "-darwin-#{arch}") + build_tar(*license_details, platform: "-darwin-#{arch}", &tar_interceptor) end safe_system("./gradlew deleteLocalJdk -Pjdk_bundle_os=#{os_name}") end @@ -255,6 +255,27 @@ namespace "artifact" do safe_system("./gradlew bootstrap") # force the build of Logstash jars end + desc "Build jdk bundled tar.gz of observabilitySRE logstash plugins with all dependencies for docker" + task "archives_docker_observabilitySRE" => ["prepare-observabilitySRE", "generate_build_metadata"] do + #with bundled JDKs + @bundles_jdk = true + exclude_paths = default_exclude_paths + %w( + bin/logstash-plugin + bin/logstash-plugin.bat + bin/logstash-keystore + bin/logstash-keystore.bat + ) + license_details = ['ELASTIC-LICENSE','-observability-sre', exclude_paths] + %w(x86_64 arm64).each do |arch| + create_archive_pack(license_details, arch, "linux") do |dedicated_directory_tar| + # injection point: Use `DedicatedDirectoryTarball#write(source_file, destination_path)` to + # copy additional files into the tarball + puts "HELLO(#{dedicated_directory_tar})" + end + end + safe_system("./gradlew bootstrap") # force the build of Logstash jars + end + desc "Build an RPM of logstash with all dependencies" task "rpm" => ["prepare", "generate_build_metadata"] do #with bundled JDKs @@ -353,6 +374,12 @@ namespace "artifact" do build_docker('oss') end + desc "Build observabilitySRE docker image" + task "docker_observabilitySRE" => ["prepare-observabilitySRE", "generate_build_metadata", "archives_docker_observabilitySRE"] do + puts("[docker_observabilitySRE] Building observabilitySRE docker image") + build_docker('observability-sre') + end + desc "Build wolfi docker image" task "docker_wolfi" => %w(prepare generate_build_metadata archives_docker) do puts("[docker_wolfi] Building Wolfi docker image") @@ -365,6 +392,7 @@ namespace "artifact" do build_dockerfile('oss') build_dockerfile('full') build_dockerfile('wolfi') + build_dockerfile('observability-sre') build_dockerfile('ironbank') end @@ -381,6 +409,19 @@ namespace "artifact" do end end + desc "Generate Dockerfile for observability-sre images" + task "dockerfile_observabilitySRE" => ["prepare-observabilitySRE", "generate_build_metadata"] do + puts("[dockerfiles] Building observability-sre Dockerfile") + build_dockerfile('observability-sre') + end + + namespace "dockerfile_observabilitySRE" do + desc "Build ObservabilitySrE Docker image from Dockerfile context files" + task "docker" => ["archives_docker_observabilitySRE", "dockerfile_observabilitySRE"] do + build_docker_from_dockerfiles('observability-sre') + end + end + desc "Generate Dockerfile for full images" task "dockerfile_full" => ["prepare", "generate_build_metadata"] do puts("[dockerfiles] Building full Dockerfiles") @@ -425,6 +466,7 @@ namespace "artifact" do Rake::Task["artifact:docker_wolfi"].invoke Rake::Task["artifact:dockerfiles"].invoke Rake::Task["artifact:docker_oss"].invoke + Rake::Task["artifact:docker_observabilitySRE"].invoke end Rake::Task["artifact:deb_oss"].invoke @@ -444,6 +486,12 @@ namespace "artifact" do Rake::Task["artifact:dockerfile_oss:docker"].invoke end + task "build_docker_observabilitySRE" => [:generate_build_metadata] do + Rake::Task["artifact:docker_observabilitySRE"].invoke + 
Rake::Task["artifact:dockerfile_observabilitySRE"].invoke + Rake::Task["artifact:dockerfile_observabilitySRE:docker"].invoke + end + task "build_docker_wolfi" => [:generate_build_metadata] do Rake::Task["artifact:docker_wolfi"].invoke Rake::Task["artifact:dockerfile_wolfi"].invoke @@ -527,6 +575,17 @@ namespace "artifact" do end end + task "prepare-observabilitySRE" do + if ENV['SKIP_PREPARE'] != "1" + Rake::Task['bootstrap'].invoke + Rake::Task['plugin:install-default'].invoke + Rake::Task['plugin:install'].invoke('logstash-filter-age') + Rake::Task['plugin:trim-for-observabilitySRE'].invoke + Rake::Task['plugin:install-fips-validation-plugin'].invoke + Rake::Task['artifact:clean-bundle-config'].invoke + end + end + def ensure_logstash_version_constant_defined # we do not want this file required when rake (ruby) parses this file # only when there is a task executing, not at the very top of this file @@ -535,7 +594,7 @@ namespace "artifact" do end end - def build_tar(license, tar_suffix = nil, exclude_paths = default_exclude_paths, platform: '') + def build_tar(license, tar_suffix = nil, exclude_paths = default_exclude_paths, platform: '', &tar_interceptor) require "zlib" require 'rubygems' require 'rubygems/package' @@ -549,36 +608,80 @@ namespace "artifact" do puts("[artifact:tar] building #{tarpath}") gz = Zlib::GzipWriter.new(File.new(tarpath, "wb"), Zlib::BEST_COMPRESSION) Minitar::Writer.open(gz) do |tar| + dedicated_directory_tarball = DedicatedDirectoryTarball.new(tar, "logstash-#{LOGSTASH_VERSION}#{PACKAGE_SUFFIX}") files(exclude_paths).each do |path| - write_to_tar(tar, path, "logstash-#{LOGSTASH_VERSION}#{PACKAGE_SUFFIX}/#{path}") + dedicated_directory_tarball.write(path) end source_license_path = "licenses/#{license}.txt" fail("Missing source license: #{source_license_path}") unless File.exist?(source_license_path) - write_to_tar(tar, source_license_path, "logstash-#{LOGSTASH_VERSION}#{PACKAGE_SUFFIX}/LICENSE.txt") + dedicated_directory_tarball.write(source_license_path, "LICENSE.txt") # add build.rb to tar metadata_file_path_in_tar = File.join("logstash-core", "lib", "logstash", "build.rb") - path_in_tar = File.join("logstash-#{LOGSTASH_VERSION}#{PACKAGE_SUFFIX}", metadata_file_path_in_tar) - write_to_tar(tar, BUILD_METADATA_FILE.path, path_in_tar) + dedicated_directory_tarball.write(BUILD_METADATA_FILE.path, metadata_file_path_in_tar) + + # yield to the tar interceptor if we have one + yield(dedicated_directory_tarball) if block_given? end gz.close end - def write_to_tar(tar, path, path_in_tar) - stat = File.lstat(path) - if stat.directory? - tar.mkdir(path_in_tar, :mode => stat.mode) - elsif stat.symlink? - tar.symlink(path_in_tar, File.readlink(path), :mode => stat.mode) - else - tar.add_file_simple(path_in_tar, :mode => stat.mode, :size => stat.size) do |io| - File.open(path, 'rb') do |fd| - chunk = nil - size = 0 - size += io.write(chunk) while chunk = fd.read(16384) - if stat.size != size - raise "Failure to write the entire file (#{path}) to the tarball. Expected to write #{stat.size} bytes; actually write #{size}" + ## + # A `DedicatedDirectoryTarball` writes everything into a dedicated + # directory that is known at init-time (e.g., NOT a tarbomb). 
All paths are expanded relative to that directory and validated to stay inside it. + class DedicatedDirectoryTarball + def initialize(minitar_writer, dedicated_directory) + @minitar_writer = minitar_writer + @dedicated_directory = Pathname.new(dedicated_directory) + end + + ## + # Write the contents of the file, directory, or symlink in `source_path` to + # the `destination_path` inside the tarball's dedicated directory. + # @param source_path [String]: the path to the file to copy, relative to PWD + # @param destination_path [String]: the path, relative to the tarball's dedicated directory, to + # write to (default: `source_path`) + # @return [void] + def write(source_path, destination_path=source_path) + write_to_tar(@minitar_writer, source_path, expand(destination_path)) + + nil + end + + def to_s + "#<#{self.class.name}:#{@dedicated_directory}>" + end + + private + + ## + # Expands the given `destination_path` relative to the dedicated directory, + # ensuring that the result is inside the dedicated directory + # @param destination_path [String] + # @return [String] + def expand(destination_path) + expanded_destination_path = @dedicated_directory / destination_path + fail("illegal destination path `#{destination_path}`") unless expanded_destination_path.descend.peek == @dedicated_directory + + expanded_destination_path.to_s + end + + def write_to_tar(tar, path, path_in_tar) + stat = File.lstat(path) + if stat.directory? + tar.mkdir(path_in_tar, :mode => stat.mode) + elsif stat.symlink? + tar.symlink(path_in_tar, File.readlink(path), :mode => stat.mode) + else + tar.add_file_simple(path_in_tar, :mode => stat.mode, :size => stat.size) do |io| + File.open(path, 'rb') do |fd| + chunk = nil + size = 0 + size += io.write(chunk) while chunk = fd.read(16384) + if stat.size != size + raise "Failure to write the entire file (#{path}) to the tarball. Expected to write #{stat.size} bytes; actually wrote #{size}" + end end end end diff --git a/rakelib/plugin.rake b/rakelib/plugin.rake index a41c8fa1e90..47d572f417a 100644 --- a/rakelib/plugin.rake +++ b/rakelib/plugin.rake @@ -17,6 +17,9 @@ require_relative "default_plugins" require 'rubygems' +require 'shellwords' + +require 'bootstrap/environment' namespace "plugin" do def install_plugins(*args) @@ -29,6 +32,36 @@ namespace "plugin" do LogStash::PluginManager::Main.run("bin/logstash-plugin", ["remove", plugin] + more_plugins) end + def list_plugins(search=nil, expand: nil, verbose: nil) + require_relative "../lib/pluginmanager/main" + args = [] + args << "--verbose" if verbose + args << search unless search.nil? + + stdout = invoke_plugin_manager!("list", *args) + + stdout.lines.select do |line| + # STDOUT pollution removal needed until 8.19 and 9.1 + # https://github.com/elastic/logstash/pull/17125 + next false if line.match?(/^Using (system java|bundled JDK|LS_JAVA_HOME defined java)/) + + # post-execution filtration is needed until 8.19 and 9.1 + # when list --[no-]expand flag is supported, use it instead + # https://github.com/elastic/logstash/pull/17124 + expand || line.match?(/^[a-z]/) + end.map(&:chomp) + end + + # invokes the plugin manager's `list` command (and possibly others), capturing combined stdout and stderr + def invoke_plugin_manager!(command, *args) + plugin_manager_bin = Pathname.new(LogStash::Environment::LOGSTASH_HOME) / "bin" / "logstash-plugin" + stdout_and_stderr = %x(#{Shellwords.escape(plugin_manager_bin)} #{Shellwords.join([command]+args)} 2>&1) + unless $?.success?
+ fail "ERROR INVOKING PLUGIN MANAGER: #{stdout_and_stderr}" + end + stdout_and_stderr + end + task "install-base" => "bootstrap" do puts("[plugin:install-base] Installing base dependencies") install_plugins("--development", "--preserve") @@ -78,6 +111,48 @@ namespace "plugin" do task.reenable # Allow this task to be run again end + task "trim-for-observabilitySRE" do |task, _| + puts("[plugin:trim-for-observabilitySRE] Removing plugins not necessary for observabilitySRE") + + allow_list = (Pathname.new(__dir__).parent / "x-pack" / "distributions" / "internal" / "observabilitySRE" / "plugin-allow-list.txt").readlines.map(&:chomp) + installed_plugins = list_plugins(expand: false) + + excess_plugins = installed_plugins - allow_list + + remove_plugin(*excess_plugins) unless excess_plugins.empty? + + task.reenable # Allow this task to be run again + end + + task "build-fips-validation-plugin" do |task, _| + puts("[plugin:build-fips-validation-plugin] installing fips_validation plugin") + + with_merged_env("GEM_BUILD_VERSION" => get_versions.fetch("logstash")) do + name = "logstash-integration-fips_validation" + path = Pathname.new(__dir__).parent / "x-pack" / "distributions" / "internal" / "observabilitySRE" / "plugin" / name + + # ensure fresh GEM_BUILD_VERSION comes from the env + path.glob("GEM_BUILD_VERSION").each(&:unlink) + + Rake::Task["plugin:build-local-core-gem"].invoke(name, path.to_s) + end + + task.reenable # Allow this task to be run again + end + + task "install-fips-validation-plugin" => "build-fips-validation-plugin" do |task, _| + puts("[plugin:install-fips-validation-plugin] installing fips_validation plugin") + + path = "build/gems" + name = "logstash-integration-fips_validation" + gems = Dir[File.join(path, "#{name}-*.gem")] + abort("ERROR: #{name} gem not found in #{path}") if gems.size != 1 + puts("[plugin:install-fips-validation-plugin] Installing #{gems.first}") + install_plugins("--no-verify", gems.first) + + task.reenable # Allow this task to be run again + end + task "clean-local-core-gem", [:name, :path] do |task, args| name = args[:name] path = args[:path] @@ -121,4 +196,13 @@ namespace "plugin" do task.reenable # Allow this task to be run again end + + # @param env [Hash] + def with_merged_env(env) + backup = ENV.to_hash + ENV.replace(backup.merge(env).compact) + yield + ensure + ENV.replace(backup) + end end # namespace "plugin" diff --git a/rubyUtils.gradle b/rubyUtils.gradle index f92b4139949..45943478b27 100644 --- a/rubyUtils.gradle +++ b/rubyUtils.gradle @@ -198,6 +198,12 @@ Object executeJruby(File projectDir, File buildDir, Closure /* Object*/ block env.put "GEM_HOME", gemDir env.put "GEM_SPEC_CACHE", "${buildDir}/cache".toString() env.put "GEM_PATH", gemDir + // Pass through ORG_GRADLE_PROJECT_fedrampHighMode if it exists in the project properties + // See https://docs.gradle.org/current/userguide/build_environment.html#setting_a_project_property + // For more information about setting properties via env vars prefixed with ORG_GRADLE_PROJECT + if (project.hasProperty('fedrampHighMode') && project.property('fedrampHighMode').toBoolean()) { + env.put "ORG_GRADLE_PROJECT_fedrampHighMode", "true" + } try { block(jruby) } finally { diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 0c736aea01b..836b80d4846 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -63,7 +63,7 @@ def puts(payload) Flores::RSpec.configure(c) c.include LogStashHelper c.extend LogStashHelper - + c.filter_run_excluding skip_fips: true if 
java.lang.System.getProperty("org.bouncycastle.fips.approved_only") == "true" if ENV['COVERAGE'] c.after(:suite) do SimpleCov.result.format! diff --git a/spec/unit/bootstrap/bundler_spec.rb b/spec/unit/bootstrap/bundler_spec.rb index d7ea654269d..44b65207d7a 100644 --- a/spec/unit/bootstrap/bundler_spec.rb +++ b/spec/unit/bootstrap/bundler_spec.rb @@ -124,7 +124,7 @@ end end - context "when updating" do + context "when updating", :skip_fips do let(:options) { { :update => 'logstash-input-stdin' } } context 'with a specific plugin' do diff --git a/spec/unit/plugin_manager/offline_plugin_packager_spec.rb b/spec/unit/plugin_manager/offline_plugin_packager_spec.rb index c66e7d66deb..785265b1329 100644 --- a/spec/unit/plugin_manager/offline_plugin_packager_spec.rb +++ b/spec/unit/plugin_manager/offline_plugin_packager_spec.rb @@ -79,7 +79,7 @@ def retrieve_dependencies_gems(path) end end - context "when the plugins exist" do + context "when the plugins exist", :skip_fips do before :all do Paquet.ui = Paquet::SilentUI end diff --git a/tools/dependencies-report/src/main/resources/licenseMapping.csv b/tools/dependencies-report/src/main/resources/licenseMapping.csv index 07ee03be163..128eef59960 100644 --- a/tools/dependencies-report/src/main/resources/licenseMapping.csv +++ b/tools/dependencies-report/src/main/resources/licenseMapping.csv @@ -147,6 +147,10 @@ dependency,dependencyUrl,licenseOverride,copyright,sourceURL "org.apache.logging.log4j:log4j-core:",https://logging.apache.org/log4j/2.x/index.html,Apache-2.0 "org.apache.logging.log4j:log4j-jcl:",https://logging.apache.org/log4j/2.x/index.html,Apache-2.0 "org.apache.logging.log4j:log4j-slf4j-impl:",https://logging.apache.org/log4j/2.x/index.html,Apache-2.0 +"org.bouncycastle:bc-fips:",https://www.bouncycastle.org,MIT +"org.bouncycastle:bcpkix-fips:",https://www.bouncycastle.org,MIT +"org.bouncycastle:bctls-fips:",https://www.bouncycastle.org,MIT +"org.bouncycastle:bcutil-fips:",https://www.bouncycastle.org,MIT "org.codehaus.janino:commons-compiler:",https://github.com/janino-compiler/janino,BSD-3-Clause "org.codehaus.janino:janino:",https://github.com/janino-compiler/janino,BSD-3-Clause "org.codehaus.mojo:animal-sniffer-annotations:",https://www.mojohaus.org/animal-sniffer/animal-sniffer-annotations/,MIT diff --git a/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bc-fips-NOTICE.txt b/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bc-fips-NOTICE.txt new file mode 100644 index 00000000000..7fe95c557b5 --- /dev/null +++ b/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bc-fips-NOTICE.txt @@ -0,0 +1,7 @@ +Copyright (c) 2014-2023 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bcpkix-fips-NOTICE.txt b/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bcpkix-fips-NOTICE.txt new file mode 100644 index 00000000000..7fe95c557b5 --- /dev/null +++ b/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bcpkix-fips-NOTICE.txt @@ -0,0 +1,7 @@ +Copyright (c) 2014-2023 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bctls-fips-NOTICE.txt b/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bctls-fips-NOTICE.txt new file mode 100644 index 00000000000..7fe95c557b5 --- /dev/null +++ b/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bctls-fips-NOTICE.txt @@ -0,0 +1,7 @@ +Copyright (c) 2014-2023 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
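The :skip_fips filtering added to spec/spec_helper.rb and qa/integration/rspec.rb keys off a single JVM system property rather than any build flag. A hedged way to exercise it outside the FIPS image (this assumes the property actually reaches the test JVM; inside the observability-sre image it is injected via the LS_JAVA_OPTS shown in the Dockerfile template above):

    # the filters check: java.lang.System.getProperty("org.bouncycastle.fips.approved_only") == "true"
    export LS_JAVA_OPTS="${LS_JAVA_OPTS:-} -Dorg.bouncycastle.fips.approved_only=true"
    ./gradlew rubyTests --info    # examples tagged :skip_fips should now be excluded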
\ No newline at end of file diff --git a/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bcutil-fips-NOTICE.txt b/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bcutil-fips-NOTICE.txt new file mode 100644 index 00000000000..7fe95c557b5 --- /dev/null +++ b/tools/dependencies-report/src/main/resources/notices/org.bouncycastle!bcutil-fips-NOTICE.txt @@ -0,0 +1,7 @@ +Copyright (c) 2014-2023 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/x-pack/build.gradle b/x-pack/build.gradle index 42ba297cdec..33c4c8081d0 100644 --- a/x-pack/build.gradle +++ b/x-pack/build.gradle @@ -6,6 +6,9 @@ description = """Logstash X-Pack""" +project.ext.LOGSTASH_CORE_PATH = "${projectDir}/../logstash-core" +apply from: "../rubyUtils.gradle" + repositories { mavenCentral() } @@ -56,9 +59,81 @@ tasks.register("rubyIntegrationTests", Test) { jvmArgs = ['--add-opens', 'java.base/sun.nio.ch=ALL-UNNAMED', '--add-opens', 'java.base/java.io=ALL-UNNAMED'] } dependsOn (":copyEs") + dependsOn ":assemble" + dependsOn "buildFipsValidationGem" inputs.files fileTree("${projectDir}/qa") inputs.files fileTree("${projectDir}/lib") inputs.files fileTree("${projectDir}/modules") + inputs.files fileTree("${rootProject.projectDir}/Gemfile.lock") + inputs.files fileTree("${rootProject.projectDir}/logstash-core/lib") systemProperty 'logstash.root.dir', projectDir.parent include '/org/logstash/xpack/test/RSpecIntegrationTests.class' } + +tasks.register("buildFipsValidationGem") { + doLast { + rake(rootProject.projectDir, rootProject.buildDir, 'plugin:build-fips-validation-plugin') + } +} + +tasks.register("observabilitySREsmokeTests", Test) { + description = "Run ObservabilitySRE smoke tests using docker-compose and RSpec" + // Need to have set up the ruby environment for rspec even through we are running in container + dependsOn(":bootstrap", ":logstash-core:assemble", ":installDevelopmentGems") + inputs.files fileTree("${projectDir}/distributions/internal/observabilitySRE/qa/smoke") + doFirst { + // Generate the certificates first + exec { + workingDir file("distributions/internal/observabilitySRE/qa/smoke/docker/certs") + commandLine 'bash', './generate.sh' + ignoreExitValue = false + } + def result = exec { + workingDir file("distributions/internal/observabilitySRE/qa/smoke/docker") + commandLine 'docker-compose', 'up', '--detach' + ignoreExitValue = true + } + if (result.exitValue != 0) { + throw new 
GradleException("Docker compose failed to start") + } + // Give containers time to start and show logs + sleep(30000) + exec { + workingDir file("distributions/internal/observabilitySRE/qa/smoke/docker") + commandLine 'docker-compose', 'logs' + } + } + systemProperty 'logstash.root.dir', projectDir.parent + include '**/org/logstash/xpack/test/RSpecObservabilitySRETests.class' + doLast { + exec { + workingDir file("distributions/internal/observabilitySRE/qa/smoke/docker") + commandLine 'docker-compose', 'down', '--volumes' + ignoreExitValue = true + } + // Clean up the generated certificates + delete fileTree("distributions/internal/observabilitySRE/qa/smoke/docker/certs").include("*.key", "*.crt", "*.csr", "*.srl") + } +} + +tasks.register("observabilitySREacceptanceTests", Test) { + description = "Run ObservabilitySRE acceptance tests" + // Need to have set up the ruby environment for rspec even through we are running in container + dependsOn(":bootstrap", ":logstash-core:assemble", ":installDevelopmentGems") + + inputs.files fileTree("${projectDir}/distributions/internal/observabilitySRE/qa/smoke") + doFirst { + // Generate the certificates first + exec { + workingDir file("distributions/internal/observabilitySRE/qa/acceptance/docker/certs") + commandLine 'bash', './generate.sh' + ignoreExitValue = false + } + } + systemProperty 'logstash.root.dir', projectDir.parent + include '**/org/logstash/xpack/test/RSpecObservabilitySREAcceptanceTests.class' + doLast { + // Clean up the generated certificates + delete fileTree("distributions/internal/observabilitySRE/qa/acceptance/docker/certs").include("*.key", "*.crt", "*.csr", "*.srl") + } +} \ No newline at end of file diff --git a/x-pack/ci/integration_tests.sh b/x-pack/ci/integration_tests.sh index 2ca952210b7..9aa6d9c8377 100755 --- a/x-pack/ci/integration_tests.sh +++ b/x-pack/ci/integration_tests.sh @@ -17,4 +17,7 @@ if [ -n "$BUILD_JAVA_HOME" ]; then export LS_JAVA_HOME="$BUILD_JAVA_HOME" fi -./gradlew runXPackIntegrationTests \ No newline at end of file +# Option for running in fedramp high mode +FEDRAMP_FLAG="${FEDRAMP_HIGH_MODE/#/-PfedrampHighMode=}" + +./gradlew runXPackIntegrationTests $FEDRAMP_FLAG diff --git a/x-pack/ci/unit_tests.sh b/x-pack/ci/unit_tests.sh index 8ac523c4a31..054570590cd 100755 --- a/x-pack/ci/unit_tests.sh +++ b/x-pack/ci/unit_tests.sh @@ -16,4 +16,8 @@ if [ -n "$BUILD_JAVA_HOME" ]; then GRADLE_OPTS="$GRADLE_OPTS -Dorg.gradle.java.home=$BUILD_JAVA_HOME" fi -./gradlew runXPackUnitTests \ No newline at end of file +# Option for running in fedramp high mode +FEDRAMP_FLAG="${FEDRAMP_HIGH_MODE/#/-PfedrampHighMode=}" + +./gradlew runXPackUnitTests $FEDRAMP_FLAG + diff --git a/x-pack/distributions/internal/observabilitySRE/build-ext.gradle b/x-pack/distributions/internal/observabilitySRE/build-ext.gradle new file mode 100644 index 00000000000..5b97a00d28b --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/build-ext.gradle @@ -0,0 +1,57 @@ +ext { + fedrampHighMode = Objects.requireNonNullElse(project.findProperty('fedrampHighMode'), false).toBoolean() +} + +subprojects { + ext { + fedrampHighMode = rootProject.fedrampHighMode + } +} + +allprojects { + afterEvaluate { + // Preserve fedrampHighMode option across subprocesses + if (rootProject.fedrampHighMode) { + tasks.withType(JavaExec).configureEach { + environment("ORG_GRADLE_PROJECT_fedrampHighMode", "true") + } + + tasks.withType(Exec).configureEach { + environment("ORG_GRADLE_PROJECT_fedrampHighMode", "true") + } + } + tasks.withType(Test) { + if 
(rootProject.fedrampHighMode) { + logger.debug("configuring ${it} to run in FIPSMode ") + systemProperty "java.security.properties", System.getenv("JAVA_SECURITY_PROPERTIES") + systemProperty "javax.net.ssl.keyStore", "/etc/java/security/keystore.bcfks" + systemProperty "javax.net.ssl.keyStoreType", "BCFKS" + systemProperty "javax.net.ssl.keyStoreProvider", "BCFIPS" + systemProperty "javax.net.ssl.keyStorePassword", "changeit" + systemProperty "javax.net.ssl.trustStore", "/etc/java/security/cacerts.bcfks" + systemProperty "javax.net.ssl.trustStoreType", "BCFKS" + systemProperty "javax.net.ssl.trustStoreProvider", "BCFIPS" + systemProperty "javax.net.ssl.trustStorePassword", "changeit" + systemProperty "ssl.KeyManagerFactory.algorithm", "PKIX" + systemProperty "ssl.TrustManagerFactory.algorithm", "PKIX" + systemProperty "org.bouncycastle.fips.approved_only", "true" + } + } + } +} + +project(':logstash-core') { + afterEvaluate { + if (rootProject.fedrampHighMode) { + logger.lifecycle("Adding BouncyCastle FIPS dependencies to logstash-core") + dependencies { + // Add FIPS dependencies to the runtimeOnly configuration + // This ensures they'll be included by the existing copyRuntimeLibs task + runtimeOnly "org.bouncycastle:bc-fips:2.0.0" + runtimeOnly "org.bouncycastle:bcpkix-fips:2.0.7" + runtimeOnly "org.bouncycastle:bctls-fips:2.0.19" + runtimeOnly "org.bouncycastle:bcutil-fips:2.0.3" + } + } + } +} diff --git a/x-pack/distributions/internal/observabilitySRE/config/security/java.policy b/x-pack/distributions/internal/observabilitySRE/config/security/java.policy new file mode 100644 index 00000000000..12db7ab4019 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/config/security/java.policy @@ -0,0 +1,21 @@ +grant { + // Your existing permissions + permission java.lang.PropertyPermission "java.runtime.name", "read"; + permission java.lang.RuntimePermission "accessClassInPackage.sun.security.internal.spec"; + permission java.lang.RuntimePermission "getProtectionDomain"; + permission java.lang.RuntimePermission "accessDeclaredMembers"; + permission org.bouncycastle.crypto.CryptoServicesPermission "tlsAlgorithmsEnabled"; + permission org.bouncycastle.crypto.CryptoServicesPermission "exportKeys"; + + // Add provider permissions + permission java.security.SecurityPermission "putProviderProperty.BCFIPS"; + permission java.security.SecurityPermission "insertProvider.BCFIPS"; + permission java.security.SecurityPermission "putProviderProperty.BCJSSE"; + permission java.security.SecurityPermission "insertProvider.BCJSSE"; +}; + +deny { + permission java.security.SecurityPermission "putProviderProperty.BC"; + permission java.security.SecurityPermission "insertProvider.BC"; + permission java.security.SecurityPermission "removeProvider.BC"; +}; \ No newline at end of file diff --git a/x-pack/distributions/internal/observabilitySRE/config/security/java.security b/x-pack/distributions/internal/observabilitySRE/config/security/java.security new file mode 100644 index 00000000000..fbd4130a8c3 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/config/security/java.security @@ -0,0 +1,118 @@ +security.provider.1=org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider +security.provider.2=org.bouncycastle.jsse.provider.BouncyCastleJsseProvider fips:BCFIPS +security.provider.3=SUN +security.provider.11=-BC + +securerandom.source=file:/dev/random +securerandom.strongAlgorithms=NativePRNGBlocking:SUN,DRBG:SUN +securerandom.drbg.config= + 
+login.configuration.provider=sun.security.provider.ConfigFile + +policy.provider=sun.security.provider.PolicyFile +policy.url.1=file:/etc/java/security/java.policy +policy.expandProperties=true +policy.allowSystemProperty=true +policy.ignoreIdentityScope=false + +keystore.type=bcfks +keystore.type.compat=true + +package.access=sun.misc.,\ + sun.reflect. +package.definition=sun.misc.,\ + sun.reflect. + +security.overridePropertiesFile=true + +ssl.KeyManagerFactory.algorithm=PKIX +ssl.TrustManagerFactory.algorithm=PKIX + +networkaddress.cache.negative.ttl=10 + +krb5.kdc.bad.policy = tryLast + +sun.security.krb5.disableReferrals=false +sun.security.krb5.maxReferrals=5 + +jdk.disabled.namedCurves = secp112r1, secp112r2, secp128r1, secp128r2, \ + secp160k1, secp160r1, secp160r2, secp192k1, secp192r1, secp224k1, \ + secp224r1, secp256k1, sect113r1, sect113r2, sect131r1, sect131r2, \ + sect163k1, sect163r1, sect163r2, sect193r1, sect193r2, sect233k1, \ + sect233r1, sect239k1, sect283k1, sect283r1, sect409k1, sect409r1, \ + sect571k1, sect571r1, X9.62 c2tnb191v1, X9.62 c2tnb191v2, \ + X9.62 c2tnb191v3, X9.62 c2tnb239v1, X9.62 c2tnb239v2, X9.62 c2tnb239v3, \ + X9.62 c2tnb359v1, X9.62 c2tnb431r1, X9.62 prime192v2, X9.62 prime192v3, \ + X9.62 prime239v1, X9.62 prime239v2, X9.62 prime239v3, brainpoolP256r1, \ + brainpoolP320r1, brainpoolP384r1, brainpoolP512r1 + +jdk.certpath.disabledAlgorithms=MD2, MD5, \ + RSA keySize < 1024, DSA keySize < 1024, EC keySize < 224, \ + SHA1, \ + secp112r1, secp112r2, secp128r1, secp128r2, \ + secp160k1, secp160r1, secp160r2, secp192k1, secp192r1, secp224k1, \ + secp224r1, secp256k1, sect113r1, sect113r2, sect131r1, sect131r2, \ + sect163k1, sect163r1, sect163r2, sect193r1, sect193r2, sect233k1, \ + sect233r1, sect239k1, sect283k1, sect283r1, sect409k1, sect409r1, \ + sect571k1, sect571r1, \ + brainpoolP256r1, brainpoolP320r1, brainpoolP384r1, brainpoolP512r1 + +jdk.security.legacyAlgorithms=SHA1, \ + RSA keySize < 2048, DSA keySize < 2048 + +jdk.jar.disabledAlgorithms=MD2, MD5, RSA keySize < 1024, \ + DSA keySize < 1024, SHA1, \ + secp112r1, secp112r2, secp128r1, secp128r2, \ + secp160k1, secp160r1, secp160r2, secp192k1, secp192r1, secp224k1, \ + secp224r1, secp256k1, sect113r1, sect113r2, sect131r1, sect131r2, \ + sect163k1, sect163r1, sect163r2, sect193r1, sect193r2, sect233k1, \ + sect233r1, sect239k1, sect283k1, sect283r1, sect409k1, sect409r1, \ + sect571k1, sect571r1, X9.62 c2tnb191v1, X9.62 c2tnb191v2, \ + X9.62 c2tnb191v3, X9.62 c2tnb239v1, X9.62 c2tnb239v2, X9.62 c2tnb239v3, \ + X9.62 c2tnb359v1, X9.62 c2tnb431r1, X9.62 prime192v2, X9.62 prime192v3, \ + X9.62 prime239v1, X9.62 prime239v2, X9.62 prime239v3, brainpoolP256r1, \ + brainpoolP320r1, brainpoolP384r1, brainpoolP512r1 + +jdk.tls.disabledAlgorithms=MD5, SSLv3, TLSv1, TLSv1.1, RC4, DES, MD5withRSA, \ + DH keySize < 1024, EC keySize < 224, 3DES_EDE_CBC, anon, NULL, \ + secp112r1, secp112r2, secp128r1, secp128r2, \ + secp160k1, secp160r1, secp160r2, secp192k1, secp192r1, secp224k1, \ + secp224r1, secp256k1, sect113r1, sect113r2, sect131r1, sect131r2, \ + sect163k1, sect163r1, sect163r2, sect193r1, sect193r2, sect233k1, \ + sect233r1, sect239k1, sect283k1, sect283r1, sect409k1, sect409r1, \ + sect571k1, sect571r1, brainpoolP256r1, \ + brainpoolP320r1, brainpoolP384r1, brainpoolP512r1 +jdk.tls.legacyAlgorithms= \ + K_NULL, C_NULL, M_NULL, \ + DH_anon, ECDH_anon, \ + RC4_128, RC4_40, DES_CBC, DES40_CBC, \ + 3DES_EDE_CBC +jdk.tls.keyLimits=AES/GCM/NoPadding KeyUpdate 2^37, \ + ChaCha20-Poly1305 KeyUpdate 
2^37 + +crypto.policy=unlimited + +jdk.xml.dsig.secureValidationPolicy=\ + disallowAlg http://www.w3.org/TR/1999/REC-xslt-19991116,\ + disallowAlg http://www.w3.org/2001/04/xmldsig-more#rsa-md5,\ + disallowAlg http://www.w3.org/2001/04/xmldsig-more#hmac-md5,\ + disallowAlg http://www.w3.org/2001/04/xmldsig-more#md5,\ + maxTransforms 5,\ + maxReferences 30,\ + disallowReferenceUriSchemes file http https,\ + minKeySize RSA 1024,\ + minKeySize DSA 1024,\ + minKeySize EC 224,\ + noDuplicateIds,\ + noRetrievalMethodLoops + +jceks.key.serialFilter = java.base/java.lang.Enum;java.base/java.security.KeyRep;\ + java.base/java.security.KeyRep$Type;java.base/javax.crypto.spec.SecretKeySpec;!* + +jdk.sasl.disabledMechanisms=CRAM-MD5, DIGEST-MD5 +jdk.security.caDistrustPolicies=SYMANTEC_TLS +jdk.io.permissionsUseCanonicalPath=false + +jdk.tls.alpnCharset=ISO_8859_1 + +org.bouncycastle.fips.approved_only=true diff --git a/x-pack/distributions/internal/observabilitySRE/docker/Dockerfile b/x-pack/distributions/internal/observabilitySRE/docker/Dockerfile new file mode 100644 index 00000000000..317ee95ee22 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/docker/Dockerfile @@ -0,0 +1,90 @@ +# Start from the FIPS-compliant base image +FROM docker.elastic.co/wolfi/chainguard-base-fips:latest + +# Create logstash user and group first to ensure consistent UID/GID +# Inspired by https://github.com/elastic/ci-agent-images/blob/03f2adb3e749500017dd1c9dc08061556df43f6f/container-images/platform-ingest/logstash-ci-no-root/Dockerfile.py#L44C1-L47 +RUN addgroup -g 1002 logstash && \ + adduser -S -h /home/logstash -s /bin/bash -u 1002 -G logstash logstash + +# Install OpenJDK 21 +RUN apk add --no-cache \ + openjdk-21 \ + bash \ + git \ + curl \ + make \ + # CODEREVIEW: I think make, gcc and glibc-dev are all in build-base package if we want that + gcc \ + glibc-dev \ + openssl + +# Create directories with correct ownership +RUN mkdir -p /etc/java/security && \ + mkdir -p /home/logstash/.gradle && \ + chown -R logstash:logstash /home/logstash/.gradle && \ + chown -R logstash:logstash /etc/java/security + +# Copy JVM configuration files: TODO manage these consistently +COPY --chown=logstash:logstash x-pack/distributions/internal/observabilitySRE/config/security/java.security /etc/java/security/ +COPY --chown=logstash:logstash x-pack/distributions/internal/observabilitySRE/config/security/java.policy /etc/java/security/ + +# Create and set ownership of working directory +WORKDIR /logstash +RUN chown -R logstash:logstash /logstash + +# Switch to logstash user for remaining operations +USER logstash + +# Copy the local Logstash source with correct ownership +COPY --chown=logstash:logstash . . 
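Editor's note (not part of the PR): the java.security override above pins the JCE provider order to BCFIPS, BCJSSE, SUN, makes BCFKS the default keystore type, and forces `org.bouncycastle.fips.approved_only=true`; the remainder of this Dockerfile (below) points the JVM at that file and at the converted BCFKS stores via `LS_JAVA_OPTS`. A minimal JRuby sketch for spot-checking that posture from inside the image follows. It simply mirrors the assertions the `logstash-integration-fips_validation` plugin makes later in this diff, and it assumes the BC-FIPS jars are on the runtime classpath and `java.security.properties` is set as shown below.

```ruby
# Illustrative only: verify the provider/keystore posture enforced by the
# java.security override inside the observabilitySRE container.
require 'java'

providers = ::Java::java.security.Security.getProviders.map(&:name)
raise "unexpected provider order: #{providers.inspect}" unless providers.first(3) == %w(BCFIPS BCJSSE SUN)

random_provider = ::Java::java.security.SecureRandom.new.getProvider.getName
raise "SecureRandom not backed by BCFIPS (got #{random_provider})" unless random_provider == "BCFIPS"

unless ::Java::org.bouncycastle.crypto.CryptoServicesRegistrar.isInApprovedOnlyMode
  raise "BouncyCastle Crypto is not in approved-only mode"
end

# keystore.type=bcfks in java.security makes BCFKS the default store type
puts "default keystore type: #{::Java::java.security.KeyStore.getDefaultType}"
puts "FIPS posture OK"
```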
+ +# Set environment variables +ENV JAVA_HOME=/usr/lib/jvm/java-21-openjdk +ENV PATH="${JAVA_HOME}/bin:${PATH}" + +# Initial build using JKS truststore +RUN ./gradlew clean bootstrap assemble installDefaultGems -PfedrampHighMode=true + +# Convert JKS to BCFKS for truststore and keystore +RUN keytool -importkeystore \ + -srckeystore $JAVA_HOME/lib/security/cacerts \ + -destkeystore /etc/java/security/cacerts.bcfks \ + -srcstoretype jks \ + -deststoretype bcfks \ + -providerpath /logstash/logstash-core/lib/jars/bc-fips-2.0.0.jar \ + -provider org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider \ + -deststorepass changeit \ + -srcstorepass changeit \ + -noprompt + +RUN keytool -importkeystore \ + -srckeystore $JAVA_HOME/lib/security/cacerts \ + -destkeystore /etc/java/security/keystore.bcfks \ + -srcstoretype jks \ + -deststoretype bcfks \ + -providerpath /logstash/logstash-core/lib/jars/bc-fips-2.0.0.jar \ + -provider org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider \ + -deststorepass changeit \ + -srcstorepass changeit \ + -noprompt + +ENV JAVA_SECURITY_PROPERTIES=/etc/java/security/java.security +ENV LS_JAVA_OPTS="\ + -Dio.netty.ssl.provider=JDK \ + # Enable debug logging for ensuring BCFIPS is being used if needed + # -Djava.security.debug=ssl,provider,certpath \ + -Djava.security.properties=${JAVA_SECURITY_PROPERTIES} \ + -Djavax.net.ssl.keyStore=/etc/java/security/keystore.bcfks \ + -Djavax.net.ssl.keyStoreType=BCFKS \ + -Djavax.net.ssl.keyStoreProvider=BCFIPS \ + -Djavax.net.ssl.keyStorePassword=changeit \ + -Djavax.net.ssl.trustStore=/etc/java/security/cacerts.bcfks \ + -Djavax.net.ssl.trustStoreType=BCFKS \ + -Djavax.net.ssl.trustStoreProvider=BCFIPS \ + -Djavax.net.ssl.trustStorePassword=changeit \ + -Dssl.KeyManagerFactory.algorithm=PKIX \ + -Dssl.TrustManagerFactory.algorithm=PKIX \ + -Dorg.bouncycastle.fips.approved_only=true" + +# Example test run, most use cases will override this +CMD ["./gradlew", "--info", "--stacktrace", "-PfedrampHighMode=true", "runIntegrationTests"] \ No newline at end of file diff --git a/x-pack/distributions/internal/observabilitySRE/plugin-allow-list.txt b/x-pack/distributions/internal/observabilitySRE/plugin-allow-list.txt new file mode 100644 index 00000000000..ee60a7bcc4d --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/plugin-allow-list.txt @@ -0,0 +1,18 @@ +logstash-codec-json +logstash-codec-multiline +logstash-codec-plain +logstash-codec-rubydebug +logstash-filter-age +logstash-filter-date +logstash-filter-drop +logstash-filter-fingerprint +logstash-filter-grok +logstash-filter-json +logstash-filter-mutate +logstash-input-beats +logstash-input-generator +logstash-input-pipeline +logstash-output-elasticsearch +logstash-output-pipeline +logstash-output-stdout +logstash-patterns-core diff --git a/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/.gitignore b/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/.gitignore new file mode 100644 index 00000000000..dfc778790f2 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/.gitignore @@ -0,0 +1,2 @@ +*.gem +GEM_BUILD_VERSION diff --git a/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/lib/logstash/fips_validation.rb b/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/lib/logstash/fips_validation.rb new file mode 100644 index 
00000000000..231d5998f34 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/lib/logstash/fips_validation.rb @@ -0,0 +1,99 @@ + +require "logstash/environment" + +require "logstash/plugins/registry" + +module LogStash + class FipsValidation < LogStash::UniversalPlugin + + include LogStash::Util::Loggable + + require 'java' + java_import org.jruby.util.SafePropertyAccessor + + def register_hooks(hooks) + logger.debug("registering hooks") + require 'logstash/runner' + hooks.register_hooks(LogStash::Runner, self) + end + + def before_bootstrap_checks(runner) + logger.debug("running before_bootstrap_checks") + accumulator = Accumulator.new(self) + + # naive security provider check: specific three in specific order before any others + observed_security_providers = ::Java::java.security.Security.getProviders.map(&:name) + expected_security_providers = %w(BCFIPS BCJSSE SUN) + if observed_security_providers.first(3) == expected_security_providers + accumulator.success "Java security providers are properly configured (observed `#{observed_security_providers}`)" + else + accumulator.failure "Java security providers are misconfigured (expected `#{expected_security_providers}` to be first 3, observed `#{observed_security_providers}`)" + end + + # naive secure-random provider check: + observed_random_provider = ::Java::java.security.SecureRandom.new.getProvider.getName + expected_random_provider = "BCFIPS" + if observed_random_provider != expected_random_provider + accumulator.failure "Java SecureRandom provider is misconfigured (expected `#{expected_random_provider}`; observed `#{observed_random_provider}`)" + else + accumulator.success "Java SecureRandom provider is properly configured (observed `#{observed_random_provider}`)" + end + + # ensure Bouncycastle is configured and ready + begin + if Java::org.bouncycastle.crypto.CryptoServicesRegistrar.isInApprovedOnlyMode + accumulator.success "Bouncycastle Crypto is in `approved-only` mode" + else + accumulator.failure "Bouncycastle Crypto is not in 'approved-only' mode" + end + + if ::Java::org.bouncycastle.crypto.fips.FipsStatus.isReady + accumulator.success "Bouncycastle Crypto is fips-ready" + else + accumulator.failure "Bouncycastle Crypto is not fips-ready" + end + rescue => ex + accumulator.failure "Bouncycastle Crypto unavailable: (#{ex.class}) #{ex.message}" + end + + # ensure non-compliant jruby openssl provider isn't registered or eligible for later registration + if org.jruby.ext.openssl.SecurityHelper.isProviderRegistered + accumulator.failure "non-compliant Jruby OpenSSL security helper is registered" + elsif org.jruby.util.SafePropertyAccessor.getBoolean("jruby.openssl.provider.register") != false + accumulator.failure "non-compliant Jruby OpenSSL security helper is eligible to be registered" + else + accumulator.success "non-compliant Jruby OpenSSL security helper is correctly not registered" + end + + # hard-exit if there were _any_ failures + if accumulator.failure? + logger.fatal "Logstash is not configured in a FIPS-compliant manner" + exit 1 + end + + logger.info("FIPS OK") + end + + class Accumulator + def initialize(logger_context) + @logger = logger_context.logger + @success = [] + @failure = [] + end + + def success(message) + @success << message + @logger.info(message) + end + + def failure(message) + @failure << message + @logger.error(message) + end + + def failure? + @failure.any? 
+ end + end + end +end diff --git a/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/lib/logstash_registry.rb b/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/lib/logstash_registry.rb new file mode 100644 index 00000000000..e8c83a592a8 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/lib/logstash_registry.rb @@ -0,0 +1,4 @@ + +require_relative "logstash/fips_validation" + +LogStash::PLUGIN_REGISTRY.add(:universal, "fips_validation", LogStash::FipsValidation) diff --git a/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/logstash-integration-fips_validation.gemspec b/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/logstash-integration-fips_validation.gemspec new file mode 100644 index 00000000000..4251a90e964 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/plugin/logstash-integration-fips_validation/logstash-integration-fips_validation.gemspec @@ -0,0 +1,38 @@ +# -*- encoding: utf-8 -*- + +gem_version_file = File.expand_path("GEM_BUILD_VERSION", __dir__) +unless File.exist?(gem_version_file) + File.write(gem_version_file, ENV.fetch("GEM_BUILD_VERSION")) +end + +Gem::Specification.new do |s| + s.name = File.basename(__FILE__, ".gemspec") + s.version = File.read(gem_version_file).chomp + s.licenses = ['Elastic-2.0'] + s.summary = "A logstash plugin that ensures FIPS 140-3 compliance" + s.description = <<~DESC + This plugin is to be included in Logstash distributions that need FedRAMP HIGH + FIPS 140-3 compliance; its hooks run before pipelines are loaded to ensure that + the process is running with the correct settings for cryptography. 
+ DESC + s.authors = ["Elasticsearch"] + s.email = 'info@elasticsearch.com' + s.homepage = "http://www.elasticsearch.org/guide/en/logstash/current/index.html" + + s.require_paths = ["lib"] + + # Files + s.files = Dir::glob("lib/**/*.rb") | + Dir::glob("*.gemspec") | + Dir.glob("GEM_BUILD_VERSION") + + # Special flag to let us know this is actually a logstash plugin + s.metadata = { + "logstash_plugin" => "true", + "logstash_group" => "integration", + "integration_plugins" => "", # empty; no config-accessible plugins + } + + # Gem dependencies + s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99" +end diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/certs/.gitignore b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/certs/.gitignore new file mode 100644 index 00000000000..f98d0ee3250 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/certs/.gitignore @@ -0,0 +1,3 @@ +*.crt +*.csr +*.key diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/certs/generate.sh b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/certs/generate.sh new file mode 100755 index 00000000000..6493e96353e --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/certs/generate.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +echo "Generating CA certificate" +openssl req -x509 -newkey rsa:3072 -days 365 -nodes -keyout ca.key -out ca.crt -subj "/CN=Elastic-CA" -sha256 + +echo "Generating Elasticsearch certificate" +openssl req -newkey rsa:3072 -nodes -keyout elasticsearch.key -out elasticsearch.csr -subj "/CN=elasticsearch" -sha256 +openssl x509 -req -in elasticsearch.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out elasticsearch.crt -days 365 -sha256 + +echo "Generating Logstash certificate" +openssl req -newkey rsa:3072 -nodes -keyout logstash.key -out logstash.csr -subj "/CN=logstash" -sha256 +openssl x509 -req -in logstash.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out logstash.crt -days 365 -sha256 + +echo "Generating Filebeat certificate" +openssl req -newkey rsa:3072 -nodes -keyout filebeat.key -out filebeat.csr -subj "/CN=filebeat" -sha256 +openssl x509 -req -in filebeat.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out filebeat.crt -days 365 -sha256 + +chmod 644 *.crt *.key diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/docker-compose.yml b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/docker-compose.yml new file mode 100644 index 00000000000..af08d3a8f81 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/docker-compose.yml @@ -0,0 +1,56 @@ +services: + logstash: + image: docker.elastic.co/logstash/logstash-observability-sre:${OBSERVABILITY_SRE_IMAGE_VERSION:-8.19.0-SNAPSHOT} + container_name: fips_test_logstash + ports: + - "5044:5044" + volumes: + - ./logstash/config/${LOGSTASH_CONFIG:-logstash-fips.yml}:/usr/share/logstash/config/logstash.yml + - ./logstash/pipeline/${LOGSTASH_PIPELINE:-logstash-to-elasticsearch.conf}:/usr/share/logstash/pipeline/logstash.conf + - ./certs:/usr/share/logstash/config/certs + networks: + - elastic + depends_on: + - elasticsearch + + elasticsearch: + image: docker.elastic.co/cloud-release/elasticsearch-cloud-ess-fips:${ELASTICSEARCH_IMAGE_VERSION:-8.19.0-SNAPSHOT} + container_name: fips_test_elasticsearch + ports: + - "9200:9200" + volumes: + - 
./elasticsearch/config/${ELASTICSEARCH_CONFIG:-elasticsearch-fips.yml}:/usr/share/elasticsearch/config/elasticsearch.yml + - ./certs:/usr/share/elasticsearch/config/certs + environment: + - discovery.type=single-node + - ES_JAVA_OPTS=-Xms1g -Xmx1g + - ELASTIC_PASSWORD=changeme + networks: + - elastic + + filebeat: + # The filebeat shipped with the elasticsearch-fips container is built for FIPS support + # There is no stand alone distribution. This uses the shipped version for testing. + image: docker.elastic.co/cloud-release/elasticsearch-cloud-ess-fips:${FILEBEAT_IMAGE_VERSION:-8.19.0-SNAPSHOT} + container_name: fips_test_filebeat + working_dir: /usr/share/filebeat + entrypoint: ["/bin/bash", "-c"] + # Start Filebeat with /tmp for data (always writable) + command: + - | + exec /opt/filebeat/filebeat -e \ + --strict.perms=false \ + -c /usr/share/filebeat/filebeat.yml \ + --path.data /tmp/filebeat_data + volumes: + - ./filebeat/config/${FILEBEAT_CONFIG:-filebeat-fips.yml}:/usr/share/filebeat/filebeat.yml:ro + - ./filebeat/data:/data/logs:ro + - ./certs:/usr/share/filebeat/certs:ro + networks: + - elastic + depends_on: + - logstash + +networks: + elastic: + driver: bridge \ No newline at end of file diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/elasticsearch/config/elasticsearch-fips.yml b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/elasticsearch/config/elasticsearch-fips.yml new file mode 100644 index 00000000000..a12caa80e9d --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/elasticsearch/config/elasticsearch-fips.yml @@ -0,0 +1,15 @@ +# Elasticsearch settings +discovery.type: single-node +http.port: 9200 +network.host: 0.0.0.0 +# Security settings +xpack.security.enabled: true +xpack.security.transport.ssl.enabled: true +xpack.security.transport.ssl.verification_mode: certificate +xpack.security.transport.ssl.key: /usr/share/elasticsearch/config/certs/elasticsearch.key +xpack.security.transport.ssl.certificate: /usr/share/elasticsearch/config/certs/elasticsearch.crt +xpack.security.transport.ssl.certificate_authorities: ["/usr/share/elasticsearch/config/certs/ca.crt"] +xpack.security.http.ssl.enabled: true +xpack.security.http.ssl.key: /usr/share/elasticsearch/config/certs/elasticsearch.key +xpack.security.http.ssl.certificate: /usr/share/elasticsearch/config/certs/elasticsearch.crt +xpack.security.http.ssl.certificate_authorities: ["/usr/share/elasticsearch/config/certs/ca.crt"] \ No newline at end of file diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/filebeat/config/filebeat-fips.yml b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/filebeat/config/filebeat-fips.yml new file mode 100644 index 00000000000..61b9ee1cca2 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/filebeat/config/filebeat-fips.yml @@ -0,0 +1,20 @@ +filebeat.inputs: +- type: log + enabled: true + paths: + - /data/logs/sample_logs.txt + +output.logstash: + hosts: ["logstash:5044"] + ssl.enabled: true + ssl.certificate: "/usr/share/filebeat/certs/filebeat.crt" + ssl.key: "/usr/share/filebeat/certs/filebeat.key" + ssl.certificate_authorities: ["/usr/share/filebeat/certs/ca.crt"] + ssl.verification_mode: "certificate" + +# Add debugging +logging.level: debug +logging.to_stderr: true + +# Keep registry in the anonymous volume to avoid host pollution +path.data: /tmp/filebeat_data diff --git 
a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/filebeat/data/sample_logs.txt b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/filebeat/data/sample_logs.txt new file mode 100644 index 00000000000..45d5929c697 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/filebeat/data/sample_logs.txt @@ -0,0 +1 @@ +TEST-LOG: FIPS filebeat test message diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/config/logstash-fips.yml b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/config/logstash-fips.yml new file mode 100644 index 00000000000..5bef7567a5a --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/config/logstash-fips.yml @@ -0,0 +1,6 @@ +api.http.host: "0.0.0.0" +xpack.monitoring.enabled: false + +pipeline.ordered: false +pipeline.workers: 2 +pipeline.buffer.type: heap diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/filebeat-to-ls-to-es.conf b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/filebeat-to-ls-to-es.conf new file mode 100644 index 00000000000..fadcbce2578 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/filebeat-to-ls-to-es.conf @@ -0,0 +1,26 @@ +input { + beats { + port => 5044 + ssl_enabled => true + ssl_certificate => "/usr/share/logstash/config/certs/logstash.crt" + ssl_key => "/usr/share/logstash/config/certs/logstash.key" + ssl_certificate_authorities => ["/usr/share/logstash/config/certs/ca.crt"] + } +} + +filter { + mutate { + add_tag => ["filebeat"] + } +} + +output { + elasticsearch { + hosts => ["https://elasticsearch:9200"] + user => "elastic" + password => "changeme" + ssl_enabled => true + ssl_certificate_authorities => ["/usr/share/logstash/config/certs/ca.crt"] + index => "filebeat-test-%{+YYYY.MM.dd}" + } +} \ No newline at end of file diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/filebeat-to-ls-weak.conf b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/filebeat-to-ls-weak.conf new file mode 100644 index 00000000000..4f40ecb1bc5 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/filebeat-to-ls-weak.conf @@ -0,0 +1,27 @@ +input { + beats { + port => 5044 + ssl_enabled => true + ssl_certificate => "/usr/share/logstash/config/certs/logstash.crt" + ssl_key => "/usr/share/logstash/config/certs/logstash.key" + ssl_certificate_authorities => ["/usr/share/logstash/config/certs/ca.crt"] + ssl_supported_protocols => ["TLSv1.1"] + } +} + +filter { + mutate { + add_tag => ["filebeat"] + } +} + +output { + elasticsearch { + hosts => ["https://elasticsearch:9200"] + user => "elastic" + password => "changeme" + ssl_enabled => true + ssl_certificate_authorities => ["/usr/share/logstash/config/certs/ca.crt"] + index => "filebeat-weak-ssl-test-%{+YYYY.MM.dd}" + } +} \ No newline at end of file diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/logstash-to-elasticsearch-weak.conf b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/logstash-to-elasticsearch-weak.conf new file mode 100644 index 00000000000..8b5f612bbfa --- /dev/null +++ 
b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/logstash-to-elasticsearch-weak.conf @@ -0,0 +1,26 @@ +input { + generator { + lines => ["FIPS weak protocol test message"] + } +} + +filter { + mutate { + add_field => { + "fips_test" => "true" + } + } +} + +output { + elasticsearch { + hosts => ["https://elasticsearch:9200"] + user => "elastic" + password => "changeme" + ssl_enabled => true + ssl_verification_mode => "none" + ssl_supported_protocols => ["TLSv1.1"] + ssl_certificate_authorities => ["/usr/share/logstash/config/certs/ca.crt"] + index => "logstash-weak-ssl-test-%{+YYYY.MM.dd}" + } +} \ No newline at end of file diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/logstash-to-elasticsearch.conf b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/logstash-to-elasticsearch.conf new file mode 100644 index 00000000000..e960779e65c --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/docker/logstash/pipeline/logstash-to-elasticsearch.conf @@ -0,0 +1,30 @@ +input { + generator { + lines => ["FIPS compliance test message"] + } +} + +filter { + mutate { + add_field => { + "fips_test" => "true" + } + } +} + +output { + elasticsearch { + hosts => ["https://elasticsearch:9200"] + user => "elastic" + password => "changeme" + ssl_enabled => true + ssl_verification_mode => "full" + ssl_certificate_authorities => ["/usr/share/logstash/config/certs/ca.crt"] + index => "logstash-fips-test-%{+YYYY.MM.dd}" + ssl_supported_protocols => ["TLSv1.2"] + } + + stdout { + codec => rubydebug + } +} \ No newline at end of file diff --git a/x-pack/distributions/internal/observabilitySRE/qa/acceptance/spec/acceptance_tests_spec.rb b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/spec/acceptance_tests_spec.rb new file mode 100644 index 00000000000..c4e38b10950 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/acceptance/spec/acceptance_tests_spec.rb @@ -0,0 +1,186 @@ +require 'net/http' +require 'uri' +require 'json' +require 'timeout' + +describe "ObservabilitySRE FIPS container" do + def es_request(path, body = nil) + es_url = "https://localhost:9200" + es_user = 'elastic' + es_password = 'changeme' + uri = URI.parse(es_url + path) + http = Net::HTTP.new(uri.host, uri.port) + http.use_ssl = true + http.verify_mode = OpenSSL::SSL::VERIFY_NONE + + request = body ? 
Net::HTTP::Post.new(uri.request_uri) : Net::HTTP::Get.new(uri.request_uri) + request.basic_auth(es_user, es_password) + request["Content-Type"] = "application/json" + request.body = body if body + + http.request(request) + end + + def wait_until(timeout: 30, interval: 1, message: nil) + Timeout.timeout(timeout) do + loop do + break if yield + sleep interval + end + end + rescue Timeout::Error + raise message || "Condition not met within #{timeout} seconds" + end + + def wait_for_elasticsearch(max_retries = 120) + retries = 0 + ready = false + + while !ready && retries < max_retries + begin + response = es_request("/_cluster/health") + if response.code == "200" + health = JSON.parse(response.body) + if ["green", "yellow"].include?(health["status"]) + ready = true + end + end + rescue => e + puts "Waiting for Elasticsearch: #{e.message}" + ensure + unless ready + retries += 1 + sleep 1 + puts "Retry #{retries}/#{max_retries}" + end + end + end + + raise "System not ready after #{max_retries} seconds" unless ready + end + + def docker_compose_invoke(subcommand, env={}) + env_str = env.map{ |k,v| "#{k.to_s.upcase}=#{Shellwords.escape(v)} "}.join + work_dir = Pathname.new("#{__dir__}/../docker").cleanpath + command = "#{env_str}docker-compose --project-directory=#{Shellwords.escape(work_dir)} #{subcommand}" + system(command) or fail "Failed to invoke Docker Compose with command `#{command}`" + end + + def docker_compose_up(env={}) = docker_compose_invoke("up --detach", env) + + def docker_compose_down(env={}) = docker_compose_invoke("down --volumes", env) + + context "when running LS to ES with FIPS-compliant configuration" do + before(:all) do + docker_compose_up + wait_for_elasticsearch + end + + after(:all) do + docker_compose_down + end + + it "data flows from Logstash to Elasticsearch using FIPS-approved SSL" do + # Wait for index to appear, indicating data is flowing + wait_until(timeout: 30, message: "Index logstash-fips-test not found") do + response = es_request("/_cat/indices?v") + response.code == "200" && response.body.include?("logstash-fips-test") + end + # Wait until specific data from logstash generator/mutate filters are observed + query = { query: { match_all: {} } }.to_json + result = nil + wait_until(timeout: 30, message: "Index logstash-fips-test not found") do + response = es_request("/logstash-fips-test-*/_search", query) + result = JSON.parse(response.body) + response.code == "200" && result["hits"]["total"]["value"] > 0 + end + expect(result["hits"]["hits"].first["_source"]).to include("fips_test") + end + end + + context "when running LS to ES with non-FIPS compliant configuration" do + before(:all) do + docker_compose_up({"LOGSTASH_PIPELINE" => "logstash-to-elasticsearch-weak.conf"}) + wait_for_elasticsearch + end + + after(:all) do + docker_compose_down + end + + it "prevents data flow when using TLSv1.1 which is not FIPS-compliant" do + # Allow time for Logstash to attempt connections (and fail) + sleep 15 + + # Verify that no index has been created that would indicate successful data flow + response = es_request("/_cat/indices?v") + today_pattern = "logstash-weak-ssl-test-#{Time.now.strftime('%Y.%m.%d')}" + expect(response.body).not_to include(today_pattern) + + # Check logs for the specific BouncyCastle FIPS error we expect + logs = `docker logs fips_test_logstash 2>&1` + + # Verify the logs contain the FIPS-mode TLS protocol error + expect(logs).to include("No usable protocols enabled") + expect(logs).to include("IllegalStateException") + expect(logs).to 
include("org.bouncycastle") + end + end + + context "When running Filebeat through LS to ES in a FIPS compliant configuration" do + before(:all) do + docker_compose_up({"LOGSTASH_PIPELINE" => "filebeat-to-ls-to-es.conf"}) + wait_for_elasticsearch + end + + after(:all) do + docker_compose_down + end + + it "data flows from Filebeat through Logstash to Elasticsearch" do + # Wait for index to appear, indicating data is flowing + wait_until(timeout: 30, message: "Index filebeat-test not found") do + response = es_request("/_cat/indices?v") + response.code == "200" && response.body.include?("filebeat-test") + end + # Wait until specific data from filebeat/logstash mutate filters are observed + query = { query: { match_all: {} } }.to_json + result = nil + wait_until(timeout: 30, message: "Index filebeat-test not found") do + response = es_request("/filebeat-test-*/_search", query) + result = JSON.parse(response.body) + response.code == "200" && result["hits"]["total"]["value"] > 0 + end + expect(result["hits"]["hits"].first["_source"]["tags"]).to include("filebeat") + end + end + + context "when running Filebeat through LS to ES with non-FIPS compliant configuration" do + before(:all) do + docker_compose_up({"LOGSTASH_PIPELINE" => "filebeat-to-ls-weak.conf"}) + wait_for_elasticsearch + end + + after(:all) do + docker_compose_down + end + + it "prevents data flow when using TLSv1.1 which is not FIPS-compliant" do + # Allow time for Logstash to attempt connections (and fail) + sleep 15 + + # Verify that no index has been created that would indicate successful data flow + response = es_request("/_cat/indices?v") + today_pattern = "filebeat-weak-ssl-test" + expect(response.body).not_to include(today_pattern) + + # Check logs for the specific BouncyCastle FIPS error we expect + logs = `docker logs fips_test_logstash 2>&1` + + # Verify the logs contain the FIPS-mode TLS protocol error + expect(logs).to include("No usable protocols enabled") + expect(logs).to include("IllegalStateException") + expect(logs).to include("org.bouncycastle") + end + end +end \ No newline at end of file diff --git a/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/certs/.gitignore b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/certs/.gitignore new file mode 100644 index 00000000000..f98d0ee3250 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/certs/.gitignore @@ -0,0 +1,3 @@ +*.crt +*.csr +*.key diff --git a/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/certs/generate.sh b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/certs/generate.sh new file mode 100644 index 00000000000..6493e96353e --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/certs/generate.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +echo "Generating CA certificate" +openssl req -x509 -newkey rsa:3072 -days 365 -nodes -keyout ca.key -out ca.crt -subj "/CN=Elastic-CA" -sha256 + +echo "Generating Elasticsearch certificate" +openssl req -newkey rsa:3072 -nodes -keyout elasticsearch.key -out elasticsearch.csr -subj "/CN=elasticsearch" -sha256 +openssl x509 -req -in elasticsearch.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out elasticsearch.crt -days 365 -sha256 + +echo "Generating Logstash certificate" +openssl req -newkey rsa:3072 -nodes -keyout logstash.key -out logstash.csr -subj "/CN=logstash" -sha256 +openssl x509 -req -in logstash.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out logstash.crt -days 365 -sha256 + +echo "Generating Filebeat 
certificate" +openssl req -newkey rsa:3072 -nodes -keyout filebeat.key -out filebeat.csr -subj "/CN=filebeat" -sha256 +openssl x509 -req -in filebeat.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out filebeat.crt -days 365 -sha256 + +chmod 644 *.crt *.key diff --git a/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/docker-compose.yml b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/docker-compose.yml new file mode 100644 index 00000000000..aaeb16e4efb --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/docker-compose.yml @@ -0,0 +1,57 @@ +version: '3' + +services: + elasticsearch: + image: docker.elastic.co/cloud-release/elasticsearch-cloud-ess-fips:8.19.0-SNAPSHOT + environment: + - discovery.type=single-node + - xpack.security.enabled=true + - ELASTIC_PASSWORD=changeme + - ES_JAVA_OPTS=-Xms512m -Xmx512m + - xpack.security.http.ssl.enabled=true + - xpack.security.http.ssl.key=/usr/share/elasticsearch/config/certs/elasticsearch.key + - xpack.security.http.ssl.certificate=/usr/share/elasticsearch/config/certs/elasticsearch.crt + - xpack.security.http.ssl.certificate_authorities=/usr/share/elasticsearch/config/certs/ca.crt + - xpack.security.transport.ssl.enabled=true + - xpack.security.transport.ssl.key=/usr/share/elasticsearch/config/certs/elasticsearch.key + - xpack.security.transport.ssl.certificate=/usr/share/elasticsearch/config/certs/elasticsearch.crt + - xpack.security.transport.ssl.certificate_authorities=/usr/share/elasticsearch/config/certs/ca.crt + ports: + - "9200:9200" + volumes: + - ./certs:/usr/share/elasticsearch/config/certs + networks: + - smoketest + + logstash: + # We build the observability SRE image with the gradle task, but then tag it + # as this in CI to ensure we are getting the local one built from the PR and not from + # the container registry + image: pr-built-observability-sre-image + volumes: + - ./logstash/pipeline:/usr/share/logstash/pipeline + - ./logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml + - ./certs:/usr/share/logstash/config/certs + ports: + - "5044:5044" + depends_on: + - elasticsearch + networks: + - smoketest + + filebeat: + image: docker.elastic.co/beats/filebeat:8.19.0-SNAPSHOT + # Test runner mounts volume with non root user, do not require this file be root + entrypoint: "filebeat -e --strict.perms=false" + volumes: + - ./filebeat/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro + - ./certs:/usr/share/filebeat/certs + - ./test-logs:/test-logs:ro + depends_on: + - logstash + networks: + - smoketest + +networks: + smoketest: + driver: bridge diff --git a/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/filebeat/filebeat.yml b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/filebeat/filebeat.yml new file mode 100644 index 00000000000..34981f1cc80 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/filebeat/filebeat.yml @@ -0,0 +1,15 @@ +filebeat.inputs: +- type: log + enabled: true + paths: + - /test-logs/*.log + +output.logstash: + hosts: ["logstash:5044"] + ssl: + enabled: true + certificate_authorities: ["/usr/share/filebeat/certs/ca.crt"] + certificate: "/usr/share/filebeat/certs/filebeat.crt" + key: "/usr/share/filebeat/certs/filebeat.key" + verification_mode: "full" + supported_protocols: ["TLSv1.2"] diff --git a/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/logstash/config/logstash.yml b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/logstash/config/logstash.yml new 
file mode 100644 index 00000000000..5bef7567a5a --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/logstash/config/logstash.yml @@ -0,0 +1,6 @@ +api.http.host: "0.0.0.0" +xpack.monitoring.enabled: false + +pipeline.ordered: false +pipeline.workers: 2 +pipeline.buffer.type: heap diff --git a/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/logstash/pipeline/logstash.conf b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/logstash/pipeline/logstash.conf new file mode 100644 index 00000000000..d8d677b0603 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/logstash/pipeline/logstash.conf @@ -0,0 +1,71 @@ +input { + beats { + port => 5044 + ssl_enabled => true + ssl_certificate_authorities => ["/usr/share/logstash/config/certs/ca.crt"] + ssl_certificate => "/usr/share/logstash/config/certs/logstash.crt" + ssl_key => "/usr/share/logstash/config/certs/logstash.key" + ssl_client_authentication => "required" + ssl_supported_protocols => ["TLSv1.2"] + } +} + +filter { + grok { + match => { "message" => "TEST-LOG: %{GREEDYDATA:log_content}" } + } + + if [log_content] =~ /timestamp=/ { + grok { + match => { "log_content" => ".*timestamp=%{TIMESTAMP_ISO8601:timestamp}.*" } + } + date { + match => [ "timestamp", "ISO8601" ] + target => "@timestamp" + } + } + + age {} + if [@metadata][age] > 86400 { + mutate { + add_tag => ["old_event"] + } + } + + if [log_content] =~ /DEBUG/ { + drop { } + } + + if [log_content] =~ /json=/ { + grok { + match => { "log_content" => "json=%{GREEDYDATA:json_string}" } + } + json { + source => "json_string" + target => "parsed_json" + } + } + + fingerprint { + source => ["message"] + target => "fingerprint" + method => "MD5" + } + + mutate { + add_field => { "environment" => "test" } + } +} + +output { + elasticsearch { + hosts => ["https://elasticsearch:9200"] + user => "elastic" + password => "changeme" + ssl_enabled => true + ssl_verification_mode => "full" + ssl_certificate_authorities => ["/usr/share/logstash/config/certs/ca.crt"] + index => "logs-%{+YYYY.MM.dd}" + ssl_supported_protocols => ["TLSv1.2"] + } +} diff --git a/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/test-logs/test.log b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/test-logs/test.log new file mode 100644 index 00000000000..049dda0d9b2 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/smoke/docker/test-logs/test.log @@ -0,0 +1,7 @@ +TEST-LOG: Simple example log entry +TEST-LOG: Error message with status=500 +TEST-LOG: Log with json={"user":"testuser","action":"login","status":"success"} +TEST-LOG: Log with json={"user":"admin","action":"update","items":5,"details":{"category":"config","changed":true}} +TEST-LOG: Log with timestamp=2025-04-01T12:00:00Z for testing date filter +TEST-LOG: Debug log message DEBUG should be dropped +TEST-LOG: Log with timestamp=2024-04-01T12:00:00Z should be tagged as old_event diff --git a/x-pack/distributions/internal/observabilitySRE/qa/smoke/spec/smoke_test_spec.rb b/x-pack/distributions/internal/observabilitySRE/qa/smoke/spec/smoke_test_spec.rb new file mode 100644 index 00000000000..38b36297804 --- /dev/null +++ b/x-pack/distributions/internal/observabilitySRE/qa/smoke/spec/smoke_test_spec.rb @@ -0,0 +1,238 @@ +require 'json' +require 'net/http' +require 'uri' +require 'openssl' + +describe "Observability SRE smoke tests" do + before(:all) do + @es_url = "https://localhost:9200" + @es_user = "elastic" + @es_password = 
"changeme" + + max_retries = 120 + retries = 0 + ready = false + + while !ready && retries < max_retries + begin + # Check cluster health first + response = es_request("/_cluster/health") + if response.code == "200" + health = JSON.parse(response.body) + if ["green", "yellow"].include?(health["status"]) + # Wait for logs-* index to be created and have documents + logs_response = es_request("/logs-*/_count") + if logs_response.code == "200" + count_data = JSON.parse(logs_response.body) + if count_data["count"] > 0 + ready = true + puts "Found #{count_data["count"]} documents in logs index" + else + puts "Waiting for documents in logs index..." + end + end + end + end + rescue => e + puts "Waiting for Elasticsearch/Logstash: #{e.message}" + ensure + unless ready + retries += 1 + sleep 1 + puts "Retry #{retries}/#{max_retries}" + end + end + end + + raise "System not ready after #{max_retries} seconds" unless ready + end + def es_request(path, body = nil) + uri = URI.parse(@es_url + path) + http = Net::HTTP.new(uri.host, uri.port) + http.use_ssl = true + http.verify_mode = OpenSSL::SSL::VERIFY_NONE + + if body + request = Net::HTTP::Post.new(uri.request_uri) + request.body = body + else + request = Net::HTTP::Get.new(uri.request_uri) + end + + request.basic_auth(@es_user, @es_password) + request["Content-Type"] = "application/json" + + http.request(request) + end + + context "Log ingestion" do + let(:query) do + JSON.generate({ + "size": 10, + "query": { + "prefix": { + "message.keyword": "TEST-LOG" + } + } + }) + end + + it "ingests logs from Filebeat" do + response = es_request("/logs-*/_search?pretty", query) + data = JSON.parse(response.body) + + expect(response.code).to eq("200") + expect(data["hits"]["total"]["value"]).to be > 0 + end + end + + context "JSON filter" do + let(:query) do + JSON.generate({ + "size": 5, + "query": { + "exists": { + "field": "parsed_json" + } + } + }) + end + + it "applies JSON filter" do + response = es_request("/logs-*/_search?pretty", query) + data = JSON.parse(response.body) + + expect(response.code).to eq("200") + expect(data["hits"]["total"]["value"]).to be > 0 + + hit = data["hits"]["hits"].first + expect(hit["_source"]["parsed_json"]).to be_a(Hash) + end + end + + context "Date filter" do + let(:query) do + JSON.generate({ + "size": 10, + "_source": ["message", "@timestamp", "timestamp"], + "query": { + "match_phrase": { + "message": "timestamp" + } + } + }) + end + + it "applies date filter" do + response = es_request("/logs-*/_search?pretty", query) + data = JSON.parse(response.body) + + # Find logs with timestamps in their content + timestamp_logs = data["hits"]["hits"].select do |hit| + hit["_source"]["message"] && hit["_source"]["message"].include?("timestamp=") + end + + # Verify that the timestamp was extracted and used + expect(timestamp_logs).not_to be_empty + timestamp_logs.each do |log| + if log["_source"]["timestamp"] + timestamp_without_ms = log["_source"]["@timestamp"].gsub('.000Z', 'Z') + expect(timestamp_without_ms).to eq(log["_source"]["timestamp"]) + end + end + end + end + + context "Age filter" do + let(:query) do + JSON.generate({ + "size": 5, + "_source": ["message", "@timestamp", "tags"], + "query": { + "match": { + "tags": "old_event" + } + } + }) + end + + it "tags old events" do + response = es_request("/logs-*/_search?pretty", query) + data = JSON.parse(response.body) + + expect(data["hits"]["total"]["value"]).to be > 0 + data["hits"]["hits"].each do |hit| + expect(hit["_source"]["tags"]).to include("old_event") + end + end 
+ end + + context "Drop filter" do + let(:query) do + JSON.generate({ + "size": 5, + "query": { + "match_phrase": { + "message": "DEBUG" + } + } + }) + end + + it "drops DEBUG logs" do + response = es_request("/logs-*/_search?pretty", query) + data = JSON.parse(response.body) + + expect(data["hits"]["total"]["value"]).to eq(0) + end + end + + context "Fingerprint filter" do + let(:query) do + JSON.generate({ + "size": 5, + "_source": ["message", "fingerprint"], + "query": { + "exists": { + "field": "fingerprint" + } + } + }) + end + + it "addsfingerprints to logs" do + response = es_request("/logs-*/_search?pretty", query) + data = JSON.parse(response.body) + + expect(data["hits"]["total"]["value"]).to be > 0 + data["hits"]["hits"].each do |hit| + expect(hit["_source"]["fingerprint"]).to be_a(String) + expect(hit["_source"]["fingerprint"].length).to eq(32) # MD5 is 32 chars + end + end + end + + context "Mutate filter" do + let(:query) do + JSON.generate({ + "size": 5, + "_source": ["message", "environment"], + "query": { + "exists": { + "field": "environment" + } + } + }) + end + + it "adds environment field via mutate" do + response = es_request("/logs-*/_search?pretty", query) + data = JSON.parse(response.body) + + expect(data["hits"]["total"]["value"]).to be > 0 + data["hits"]["hits"].each do |hit| + expect(hit["_source"]["environment"]).to eq("test") + end + end + end +end \ No newline at end of file diff --git a/x-pack/qa/integration/fips-validation/logstash-integration-fips-validation_spec.rb b/x-pack/qa/integration/fips-validation/logstash-integration-fips-validation_spec.rb new file mode 100644 index 00000000000..02da412ec4e --- /dev/null +++ b/x-pack/qa/integration/fips-validation/logstash-integration-fips-validation_spec.rb @@ -0,0 +1,40 @@ +require_relative "../spec_helper" + +context "FipsValidation Integration Plugin" do + + context "when running on stock Logstash", :skip_fips do + # on non-FIPS Logstash, we need to install the plugin ourselves + before(:all) do + logstash_home = Pathname.new(get_logstash_path).cleanpath + build_dir = (logstash_home / "build" / "gems") + gems = build_dir.glob("logstash-integration-fips_validation-*.gem") + fail("No FipsValidation Gem in #{build_dir}") if gems.none? 
+ fail("Multiple FipsValidation Gems in #{build_dir}") if gems.size > 1 + fips_validation_plugin = gems.first + + response = logstash_plugin("install", fips_validation_plugin.to_s) + aggregate_failures('setup') do + expect(response).to be_successful + expect(response.stdout_lines.map(&:chomp)).to include("Installation successful") + end + end + after(:all) do + response = logstash_plugin("remove", "logstash-integration-fips_validation") + expect(response).to be_successful + end + it "prevents Logstash from running and logs helpful guidance" do + process = logstash_with_empty_default("bin/logstash --log.level=debug -e 'input { generator { count => 1 } }'", timeout: 60) + + aggregate_failures do + expect(process).to_not be_successful + process.stdout_lines.join.tap do |stdout| + expect(stdout).to_not include("Pipeline started") + expect(stdout).to include("Java security providers are misconfigured") + expect(stdout).to include("Java SecureRandom provider is misconfigured") + expect(stdout).to include("Bouncycastle Crypto unavailable") + expect(stdout).to include("Logstash is not configured in a FIPS-compliant manner") + end + end + end + end +end \ No newline at end of file diff --git a/x-pack/qa/integration/spec_helper.rb b/x-pack/qa/integration/spec_helper.rb index 974092b02af..f5dbbaef543 100644 --- a/x-pack/qa/integration/spec_helper.rb +++ b/x-pack/qa/integration/spec_helper.rb @@ -9,3 +9,9 @@ require_relative "support/elasticsearch/api/actions/update_password" require "json" require "json-schema" + +RSpec.configure do |c| + if java.lang.System.getProperty("org.bouncycastle.fips.approved_only") == "true" + c.filter_run_excluding skip_fips: true + end +end \ No newline at end of file diff --git a/x-pack/qa/integration/support/helpers.rb b/x-pack/qa/integration/support/helpers.rb index 0122b8dbab1..8cbb40bd5dd 100644 --- a/x-pack/qa/integration/support/helpers.rb +++ b/x-pack/qa/integration/support/helpers.rb @@ -164,6 +164,14 @@ def logstash_with_empty_default(cmd, options = {}, default_settings = {}) Belzebuth.run(cmd, {:directory => get_logstash_path }.merge(options.fetch(:belzebuth, { }))) end +def logstash_plugin(command, *args) + cmd = Shellwords.join(["bin/logstash-plugin", command, *args]) + pwd = Pathname.new(get_logstash_path).cleanpath.to_s + + puts "Running logstash plugin manager with `#{cmd}` in `#{pwd}`" + Belzebuth.run(cmd, directory: pwd, timeout: 60) +end + def verify_response!(cmd, response) unless response.successful? 
raise "Something went wrong when installing xpack,\ncmd: #{cmd}\nresponse: #{response}" diff --git a/x-pack/src/test/java/org/logstash/xpack/test/RSpecObservabilitySREAcceptanceTests.java b/x-pack/src/test/java/org/logstash/xpack/test/RSpecObservabilitySREAcceptanceTests.java new file mode 100644 index 00000000000..04495429051 --- /dev/null +++ b/x-pack/src/test/java/org/logstash/xpack/test/RSpecObservabilitySREAcceptanceTests.java @@ -0,0 +1,18 @@ +package org.logstash.xpack.test; + +import org.junit.Test; +import java.util.Arrays; +import java.util.List; + +public class RSpecObservabilitySREAcceptanceTests extends RSpecTests { + @Override + protected List rspecArgs() { + return Arrays.asList("-fd", "distributions/internal/observabilitySRE/qa/acceptance/spec"); + } + + @Test + @Override + public void rspecTests() throws Exception { + super.rspecTests(); + } +} \ No newline at end of file diff --git a/x-pack/src/test/java/org/logstash/xpack/test/RSpecObservabilitySRETests.java b/x-pack/src/test/java/org/logstash/xpack/test/RSpecObservabilitySRETests.java new file mode 100644 index 00000000000..c450763d123 --- /dev/null +++ b/x-pack/src/test/java/org/logstash/xpack/test/RSpecObservabilitySRETests.java @@ -0,0 +1,18 @@ +package org.logstash.xpack.test; + +import org.junit.Test; +import java.util.Arrays; +import java.util.List; + +public class RSpecObservabilitySRETests extends RSpecTests { + @Override + protected List rspecArgs() { + return Arrays.asList("-fd", "distributions/internal/observabilitySRE/qa/smoke/spec"); + } + + @Test + @Override + public void rspecTests() throws Exception { + super.rspecTests(); + } +} \ No newline at end of file