Merge branch 'release/3.5.25' #392
Workflow file for this run
name: build
on:
  push:
    branches:
      - develop
    tags:
      - "*"
  workflow_dispatch:
  schedule:
    # Every month, on the first day at 08:42
    - cron: "42 8 1 * *"
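    # (cron fields: minute hour day-of-month month day-of-week; GitHub Actions
    # schedules run in UTC)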
jobs:
  generate-matrix:
    name: Generate Matrix
    runs-on: ubuntu-latest
    outputs:
      analyzers_matrix: ${{ steps.set-matrix.outputs.analyzers_matrix }}
      responders_matrix: ${{ steps.set-matrix.outputs.responders_matrix }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: List analyzer and responder JSON files and build matrices
        id: set-matrix
        run: |
| echo "Building analyzers matrix..." | |
| # define your lists of allowed directories for analyzers and responders for targeted build | |
| # If you want to disable filtering, set the corresponding flag to "false" | |
| filter_analyzers="false" | |
| filter_responders="false" | |
| allowed_analyzers="Hippocampe|EmlParser|ClamAV|FileInfo" | |
| allowed_responders="MSDefenderOffice365" | |
| # If filtering is disabled, use a regex that matches everything. | |
| if [ "$filter_analyzers" != "true" ]; then | |
| allowed_analyzers=".*" | |
| fi | |
| if [ "$filter_responders" != "true" ]; then | |
| allowed_responders=".*" | |
| fi | |
| echo "Building analyzers matrix..." | |
| analyzers_matrix=$(find analyzers -type f -name '*.json' -printf '%P\n' | \ | |
| grep -E "^($allowed_analyzers)/" | \ | |
| jq -R -s -c 'split("\n")[:-1] | map({directory: (split("/")[0]), path: .}) | {include: .}') | |
| echo "Building responders matrix..." | |
| responders_matrix=$(find responders -type f -name '*.json' -printf '%P\n' | \ | |
| grep -E "^($allowed_responders)/" | \ | |
| jq -R -s -c 'split("\n")[:-1] | map({directory: (split("/")[0]), path: .}) | {include: .}') | |
| echo "Generated analyzers matrix: $analyzers_matrix" | |
| echo "Generated responders matrix: $responders_matrix" | |
          {
            echo "analyzers_matrix<<EOF"
            echo "$analyzers_matrix"
            echo "EOF"
          } >> "$GITHUB_OUTPUT"
          {
            echo "responders_matrix<<EOF"
            echo "$responders_matrix"
            echo "EOF"
          } >> "$GITHUB_OUTPUT"
  build_analyzers:
    name: Build Analyzers
    needs: generate-matrix
    runs-on: ubuntu-latest
    continue-on-error: true
    strategy:
      max-parallel: 20
      matrix: ${{ fromJson(needs.generate-matrix.outputs.analyzers_matrix) }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: GHCR Login
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Convert directory to lowercase
        id: lowercase_dir
        run: |
          lower_dir=$(echo "${{ matrix.directory }}" | tr '[:upper:]' '[:lower:]')
          echo "lower_dir=${lower_dir}" >> $GITHUB_ENV
      - name: Set lowercase repository owner
        run: |
          owner="${{ github.repository_owner }}"
          lower_owner=$(echo "$owner" | tr '[:upper:]' '[:lower:]')
          echo "LOWER_REPO_OWNER=$lower_owner" >> $GITHUB_ENV
      - name: Parse JSON and set environment variables from matrix.path (using jq)
        id: parse_json
        run: |
          json_file="./analyzers/${{ matrix.path }}"
          if [ -f "$json_file" ]; then
            lower_name=$(jq -r '.name | ascii_downcase' "$json_file")
            version=$(jq -r '.version // empty' "$json_file")
            description=$(jq -r '.description // empty' "$json_file")
            command=$(jq -r '.command // empty' "$json_file")
            echo "LOWERCASE_NAME=${lower_name}" >> $GITHUB_ENV
            echo "VERSION=${version}" >> $GITHUB_ENV
            echo "DESCRIPTION=${description}" >> $GITHUB_ENV
            echo "COMMAND=${command}" >> $GITHUB_ENV
            if [[ "$version" == *.* ]]; then
              version_split=$(echo "$version" | cut -d '.' -f 1)
              echo "VERSION_SPLIT=${version_split}" >> $GITHUB_ENV
            else
              echo "VERSION_SPLIT=${version}" >> $GITHUB_ENV
            fi
          else
            echo "File not found: $json_file"
            exit 1
          fi
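      # Illustrative example (hypothetical flavor file): a JSON with
      #   {"name": "Foo_Analyzer", "version": "2.0", "command": "Foo/foo_analyzer.py"}
      # yields LOWERCASE_NAME=foo_analyzer, VERSION=2.0, VERSION_SPLIT=2 and
      # COMMAND=Foo/foo_analyzer.py for the steps below.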
      - name: Check and create Dockerfile if not present
        run: |
          dockerfile_path="analyzers/${{ matrix.directory }}/Dockerfile"
          matrix_directory="${{ matrix.directory }}"
          command_value="${{ env.COMMAND }}"
          # Workers that need libmagic installed; add more entries separated by spaces
          special_alpine_workers="PaloAltoNGFW Worker2 Worker3 AnotherWorker"
          if [ ! -f "$dockerfile_path" ]; then
            echo "Dockerfile not found in $dockerfile_path. Creating one..."
            # echo "FROM python:3-alpine" > "$dockerfile_path"
            # echo "RUN apk add --no-cache openssl ca-certificates" >> "$dockerfile_path"
            echo "FROM python:3-slim" > "$dockerfile_path"
            # echo "RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates && rm -rf /var/lib/apt/lists/*" >> "$dockerfile_path"
            # Check whether the current worker needs the extra package
            if echo "$special_alpine_workers" | grep -qw "$matrix_directory"; then
              # echo "RUN apk add --no-cache file-dev && rm -rf /var/cache/apk/*" >> "$dockerfile_path"
              echo "RUN apt-get update && apt-get install -y --no-install-recommends libmagic1 && rm -rf /var/lib/apt/lists/*" >> "$dockerfile_path"
            fi
            echo "WORKDIR /worker" >> "$dockerfile_path"
            echo "COPY requirements.txt ${matrix_directory}/" >> "$dockerfile_path"
            echo "RUN test ! -e ${matrix_directory}/requirements.txt || pip install --no-cache-dir -r ${matrix_directory}/requirements.txt" >> "$dockerfile_path"
            echo "COPY . ${matrix_directory}/" >> "$dockerfile_path"
            echo "ENTRYPOINT [\"python\", \"${command_value}\"]" >> "$dockerfile_path"
          else
            echo "Dockerfile exists: $dockerfile_path"
          fi
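      # For a hypothetical worker "Foo" with command "Foo/foo.py", the step above
      # would generate:
      #   FROM python:3-slim
      #   WORKDIR /worker
      #   COPY requirements.txt Foo/
      #   RUN test ! -e Foo/requirements.txt || pip install --no-cache-dir -r Foo/requirements.txt
      #   COPY . Foo/
      #   ENTRYPOINT ["python", "Foo/foo.py"]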
      - name: Check if image needs rebuild
        id: check-rebuild
        run: |
          image="ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.VERSION }}"
          current_sha="${{ github.sha }}"
          token="${{ secrets.GITHUB_TOKEN }}"
          # Fetch the image manifest from GHCR
          manifest_response=$(curl -sSL \
            -H "Authorization: Bearer $token" \
            -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
            "https://ghcr.io/v2/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}/manifests/${{ env.VERSION }}")
          # Check whether the manifest response contains a valid config digest
          config_digest=$(echo "$manifest_response" | jq -r '.config.digest // empty')
          if [[ -z "$config_digest" ]]; then
            echo "No existing image or unable to fetch manifest. Rebuild needed."
            echo "rebuild=true" >> $GITHUB_OUTPUT
            exit 0
          fi
          # Fetch the image config blob to extract labels
          config_response=$(curl -sSL \
            -H "Authorization: Bearer $token" \
            "https://ghcr.io/v2/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}/blobs/$config_digest")
          # Extract the revision label safely
          image_revision=$(echo "$config_response" | jq -r '.config.Labels["org.opencontainers.image.revision"] // empty')
          # Debugging: print values
          echo "current_sha: $current_sha"
          echo "image_revision: $image_revision"
          if [[ "$image_revision" == "$current_sha" ]]; then
            echo "No rebuild needed. SHA matches: $current_sha"
            echo "rebuild=false" >> $GITHUB_OUTPUT
          else
            echo "SHA mismatch or missing label. Rebuild needed."
            echo "rebuild=true" >> $GITHUB_OUTPUT
          fi
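      # The two requests above follow the Docker registry v2 API: the manifest
      # exposes the config digest at .config.digest, and the config blob exposes
      # the image labels at .config.Labels, including the
      # org.opencontainers.image.revision label written by the build step below.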
      - name: Set build date
        id: build_date
        run: echo "date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> "$GITHUB_OUTPUT"
      - name: Set Image Tags
        run: |
          if [[ "${{ github.ref }}" =~ ^refs/tags/ ]] || [ "${{ github.event_name }}" == "schedule" ]; then
            # Keep one image tag for import testing
            echo "IMAGE_TAG=${{ env.VERSION_SPLIT }}" >> $GITHUB_ENV
            # Produce two tags:
            # 1) :VERSION (e.g. 2.0)
            # 2) :VERSION_SPLIT (e.g. 2)
            echo "IMAGE_TAGS=ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.VERSION }},ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.VERSION_SPLIT }}" >> $GITHUB_ENV
          else
            echo "IMAGE_TAG=devel" >> $GITHUB_ENV
            # Only the :devel tag
            echo "IMAGE_TAGS=ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:devel" >> $GITHUB_ENV
          fi
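      # Example: with VERSION=3.1, a tag or scheduled run pushes :3.1 and :3,
      # and the import tests below run against :3; any other trigger pushes
      # only :devel.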
      - name: Set Platforms
        id: set_platforms
        run: |
          NO_ARM64_DIRS="FileInfo"
          CURRENT_DIR="${{ matrix.directory }}"
          # Default to multi-arch
          PLATFORMS="linux/amd64,linux/arm64"
          # Check if CURRENT_DIR is in the NO_ARM64_DIRS list
          if echo "$NO_ARM64_DIRS" | grep -qw "$CURRENT_DIR"; then
            echo "Directory '$CURRENT_DIR' is in NO_ARM64_DIRS; limiting to linux/amd64 only."
            PLATFORMS="linux/amd64"
          fi
          echo "PLATFORMS=$PLATFORMS" >> $GITHUB_ENV
      - name: Build and push multi-arch image to GHCR
        if: steps.check-rebuild.outputs.rebuild == 'true'
        uses: docker/build-push-action@v6
        with:
          context: analyzers/${{ matrix.directory }}
          file: ./analyzers/${{ matrix.directory }}/Dockerfile
          platforms: ${{ env.PLATFORMS }}
          push: true
          tags: ${{ env.IMAGE_TAGS }}
          labels: |
            org.opencontainers.image.created=${{ steps.build_date.outputs.date }}
            org.opencontainers.image.title=${{ env.LOWERCASE_NAME }}
            org.opencontainers.image.description=${{ env.DESCRIPTION }}
            org.opencontainers.image.url=https://thehive-project.org
            org.opencontainers.image.source=https://github.com/TheHive-Project/Cortex-Analyzers
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.vendor=TheHive Project
            org.opencontainers.image.version=${{ env.VERSION }}
          annotations: |
            org.opencontainers.image.description=${{ env.DESCRIPTION }}
            org.opencontainers.image.source=https://github.com/${{ github.repository }}
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.title=${{ env.LOWERCASE_NAME }}
            org.opencontainers.image.url=https://thehive-project.org
            org.opencontainers.image.version=${{ env.VERSION }}
      - name: Scan image for vulnerabilities (Trivy)
        uses: aquasecurity/trivy-action@0.32.0
        with:
          image-ref: ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.IMAGE_TAG }}
          format: table
          vuln-type: 'os,library'
          severity: 'CRITICAL,HIGH'
          exit-code: 0
          ignore-unfixed: true
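      # With exit-code: 0 the Trivy scan is informational only: CRITICAL/HIGH
      # findings are listed in the job log but never fail the build.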
      - name: Test imports in the container (amd64)
        if: ${{ steps.check-rebuild.outputs.rebuild == 'true' && contains(env.PLATFORMS, 'linux/amd64') }}
        run: |
          python <<EOF
          import subprocess
          import sys
          import textwrap
          image_tag = "ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.IMAGE_TAG }}"
          command = "${{ env.COMMAND }}"
          worker_name = "${{ matrix.directory }}"
          print(f"🔍 Testing (amd64) Python imports in built image '{image_tag}' for worker '{worker_name}'...")
          # In-container python snippet (AST-based import checking):
          test_code = textwrap.dedent(f'''
              import os, sys, ast, os.path as osp
              entrypoint_full = "{command}"
              fallback_dir = "{worker_name}"
              if "/" in entrypoint_full:
                  dir_part = osp.dirname(entrypoint_full)
                  file_part = osp.basename(entrypoint_full)
                  # If the directory doesn't exist but fallback_dir does, change to the fallback.
                  if not osp.isdir(dir_part) and osp.isdir(fallback_dir):
                      dir_part = fallback_dir
                  os.chdir(osp.join(os.getcwd(), dir_part))
                  entrypoint = file_part
              else:
                  entrypoint = entrypoint_full
              if not osp.exists(entrypoint):
                  print("❌ ERROR: {{}} not found inside the container.".format(entrypoint))
                  sys.exit(1)
              with open(entrypoint, 'r', encoding='utf-8') as f:
                  source = f.read()
              try:
                  tree = ast.parse(source)
              except SyntaxError as e:
                  print(f"❌ Syntax error in {{entrypoint}}: {{e}}")
                  sys.exit(1)
              imports = []
              for node in ast.walk(tree):
                  if isinstance(node, ast.Import):
                      for alias in node.names:
                          imports.append(alias.name)
                  elif isinstance(node, ast.ImportFrom):
                      if node.module:
                          imports.append(node.module)
              print("🔍 Checking Python imports from", entrypoint)
              for mod in set(imports):
                  try:
                      __import__(mod)
                      print(f"✅ {{mod}} - SUCCESS")
                  except Exception as e:
                      print(f"❌ {{mod}} - FAILED: {{e}}")
                      sys.exit(1)
              print("✅ All imports tested successfully!")
          ''')
          try:
              # Pull the amd64 variant explicitly
              pull_result = subprocess.run(
                  ["docker", "pull", "--platform", "linux/amd64", image_tag],
                  capture_output=True,
                  text=True
              )
              print(pull_result.stdout, file=sys.stdout)
              print(pull_result.stderr, file=sys.stderr)
              # Run the container with the Python test code
              result = subprocess.run(
                  [
                      "docker", "run", "--rm",
                      "--platform", "linux/amd64",
                      "--entrypoint", "python",
                      image_tag,
                      "-c", test_code
                  ],
                  capture_output=True,
                  text=True
              )
              # Print container logs
              print(result.stdout, file=sys.stdout)
              print(result.stderr, file=sys.stderr)
              if result.returncode != 0:
                  warning_message = f"Import testing FAILED (amd64) for worker '{worker_name}' with exit code {result.returncode}"
                  print("⚠️", warning_message)
                  print(f"::warning::{warning_message}")
              else:
                  print("✅ Import testing succeeded (amd64)")
          except Exception as e:
              print("::warning::Error during import testing (amd64):", e)
              sys.exit(1)
          EOF
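      # The arm64 variant below repeats the same AST-based check; on the amd64
      # runner the container executes through the QEMU emulation set up earlier,
      # so it is slower but requires no arm64 hardware.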
      - name: Test imports in the container (arm64)
        if: ${{ steps.check-rebuild.outputs.rebuild == 'true' && contains(env.PLATFORMS, 'linux/arm64') }}
        run: |
          python <<EOF
          import subprocess
          import sys
          import textwrap
          image_tag = "ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.IMAGE_TAG }}"
          command = "${{ env.COMMAND }}"
          worker_name = "${{ matrix.directory }}"
          print(f"🔍 Testing (arm64) Python imports in built image '{image_tag}' for worker '{worker_name}'...")
          # In-container python snippet (AST-based import checking):
          test_code = textwrap.dedent(f'''
              import os, sys, ast, os.path as osp
              entrypoint_full = "{command}"
              fallback_dir = "{worker_name}"
              if "/" in entrypoint_full:
                  dir_part = osp.dirname(entrypoint_full)
                  file_part = osp.basename(entrypoint_full)
                  # If the directory doesn't exist but fallback_dir does, change to the fallback.
                  if not osp.isdir(dir_part) and osp.isdir(fallback_dir):
                      dir_part = fallback_dir
                  os.chdir(osp.join(os.getcwd(), dir_part))
                  entrypoint = file_part
              else:
                  entrypoint = entrypoint_full
              if not osp.exists(entrypoint):
                  print("❌ ERROR: {{}} not found inside the container.".format(entrypoint))
                  sys.exit(1)
              with open(entrypoint, 'r', encoding='utf-8') as f:
                  source = f.read()
              try:
                  tree = ast.parse(source)
              except SyntaxError as e:
                  print(f"❌ Syntax error in {{entrypoint}}: {{e}}")
                  sys.exit(1)
              imports = []
              for node in ast.walk(tree):
                  if isinstance(node, ast.Import):
                      for alias in node.names:
                          imports.append(alias.name)
                  elif isinstance(node, ast.ImportFrom):
                      if node.module:
                          imports.append(node.module)
              print("🔍 Checking Python imports from", entrypoint)
              for mod in set(imports):
                  try:
                      __import__(mod)
                      print(f"✅ {{mod}} - SUCCESS")
                  except Exception as e:
                      print(f"❌ {{mod}} - FAILED: {{e}}")
                      sys.exit(1)
              print("✅ All imports tested successfully!")
          ''')
          try:
              # Pull the arm64 variant explicitly
              pull_result = subprocess.run(
                  ["docker", "pull", "--platform", "linux/arm64", image_tag],
                  capture_output=True,
                  text=True
              )
              print(pull_result.stdout, file=sys.stdout)
              print(pull_result.stderr, file=sys.stderr)
              # Run the container with the Python test code
              result = subprocess.run(
                  [
                      "docker", "run", "--rm",
                      "--platform", "linux/arm64",
                      "--entrypoint", "python",
                      image_tag,
                      "-c", test_code
                  ],
                  capture_output=True,
                  text=True
              )
              # Print container logs
              print(result.stdout, file=sys.stdout)
              print(result.stderr, file=sys.stderr)
              if result.returncode != 0:
                  warning_message = f"Import testing FAILED (arm64) for worker '{worker_name}' with exit code {result.returncode}"
                  print("⚠️", warning_message)
                  print(f"::warning::{warning_message}")
              else:
                  print("✅ Import testing succeeded (arm64)")
          except Exception as e:
              print("::warning::Error during import testing (arm64):", e)
              sys.exit(1)
          EOF
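  # build_responders mirrors build_analyzers step for step; only the
  # responders/ paths and the NO_ARM64_DIRS list differ.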
  build_responders:
    name: Build Responders
    needs: generate-matrix
    runs-on: ubuntu-latest
    continue-on-error: true
    strategy:
      max-parallel: 20
      matrix: ${{ fromJson(needs.generate-matrix.outputs.responders_matrix) }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: GHCR Login
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Convert directory to lowercase
        id: lowercase_dir
        run: |
          lower_dir=$(echo "${{ matrix.directory }}" | tr '[:upper:]' '[:lower:]')
          echo "lower_dir=${lower_dir}" >> $GITHUB_ENV
      - name: Set lowercase repository owner
        run: |
          owner="${{ github.repository_owner }}"
          lower_owner=$(echo "$owner" | tr '[:upper:]' '[:lower:]')
          echo "LOWER_REPO_OWNER=$lower_owner" >> $GITHUB_ENV
      - name: Parse JSON and set environment variables from matrix.path (using jq)
        id: parse_json
        run: |
          json_file="./responders/${{ matrix.path }}"
          if [ -f "$json_file" ]; then
            lower_name=$(jq -r '.name | ascii_downcase' "$json_file")
            version=$(jq -r '.version // empty' "$json_file")
            description=$(jq -r '.description // empty' "$json_file")
            command=$(jq -r '.command // empty' "$json_file")
            echo "LOWERCASE_NAME=${lower_name}" >> $GITHUB_ENV
            echo "VERSION=${version}" >> $GITHUB_ENV
            echo "DESCRIPTION=${description}" >> $GITHUB_ENV
            echo "COMMAND=${command}" >> $GITHUB_ENV
            if [[ "$version" == *.* ]]; then
              version_split=$(echo "$version" | cut -d '.' -f 1)
              echo "VERSION_SPLIT=${version_split}" >> $GITHUB_ENV
            else
              echo "VERSION_SPLIT=${version}" >> $GITHUB_ENV
            fi
          else
            echo "File not found: $json_file"
            exit 1
          fi
      - name: Check and create Dockerfile if not present
        run: |
          dockerfile_path="responders/${{ matrix.directory }}/Dockerfile"
          matrix_directory="${{ matrix.directory }}"
          command_value="${{ env.COMMAND }}"
          # Workers that need libmagic installed; add more entries separated by spaces
          special_alpine_workers="PaloAltoNGFW Worker2 Worker3 AnotherWorker"
          if [ ! -f "$dockerfile_path" ]; then
            echo "Dockerfile not found in $dockerfile_path. Creating one..."
            # echo "FROM python:3-alpine" > "$dockerfile_path"
            # echo "RUN apk add --no-cache openssl ca-certificates bind-tools" >> "$dockerfile_path"
            echo "FROM python:3-slim" > "$dockerfile_path"
            # Check whether the current worker needs the extra package
            if echo "$special_alpine_workers" | grep -qw "$matrix_directory"; then
              # echo "RUN apk add --no-cache file-dev && rm -rf /var/cache/apk/*" >> "$dockerfile_path"
              echo "RUN apt-get update && apt-get install -y --no-install-recommends libmagic1 && rm -rf /var/lib/apt/lists/*" >> "$dockerfile_path"
            fi
            echo "WORKDIR /worker" >> "$dockerfile_path"
            echo "COPY requirements.txt ${matrix_directory}/" >> "$dockerfile_path"
            echo "RUN test ! -e ${matrix_directory}/requirements.txt || pip install --no-cache-dir -r ${matrix_directory}/requirements.txt" >> "$dockerfile_path"
            echo "COPY . ${matrix_directory}/" >> "$dockerfile_path"
            echo "ENTRYPOINT [\"python\", \"${command_value}\"]" >> "$dockerfile_path"
          else
            echo "Dockerfile exists: $dockerfile_path"
          fi
      - name: Check if image needs rebuild
        id: check-rebuild
        run: |
          image="ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.VERSION }}"
          current_sha="${{ github.sha }}"
          token="${{ secrets.GITHUB_TOKEN }}"
          # Fetch the image manifest from GHCR
          manifest_response=$(curl -sSL \
            -H "Authorization: Bearer $token" \
            -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
            "https://ghcr.io/v2/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}/manifests/${{ env.VERSION }}")
          # Check whether the manifest response contains a valid config digest
          config_digest=$(echo "$manifest_response" | jq -r '.config.digest // empty')
          if [[ -z "$config_digest" ]]; then
            echo "No existing image or unable to fetch manifest. Rebuild needed."
            echo "rebuild=true" >> $GITHUB_OUTPUT
            exit 0
          fi
          # Fetch the image config blob to extract labels
          config_response=$(curl -sSL \
            -H "Authorization: Bearer $token" \
            "https://ghcr.io/v2/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}/blobs/$config_digest")
          # Extract the revision label safely
          image_revision=$(echo "$config_response" | jq -r '.config.Labels["org.opencontainers.image.revision"] // empty')
          # Debugging: print values
          echo "current_sha: $current_sha"
          echo "image_revision: $image_revision"
          if [[ "$image_revision" == "$current_sha" ]]; then
            echo "No rebuild needed. SHA matches: $current_sha"
            echo "rebuild=false" >> $GITHUB_OUTPUT
          else
            echo "SHA mismatch or missing label. Rebuild needed."
            echo "rebuild=true" >> $GITHUB_OUTPUT
          fi
      - name: Set build date
        id: build_date
        run: echo "date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> "$GITHUB_OUTPUT"
      - name: Set Image Tags
        run: |
          if [[ "${{ github.ref }}" =~ ^refs/tags/ ]] || [ "${{ github.event_name }}" == "schedule" ]; then
            # Keep one image tag for import testing
            echo "IMAGE_TAG=${{ env.VERSION_SPLIT }}" >> $GITHUB_ENV
            # Produce two tags:
            # 1) :VERSION (e.g. 2.0)
            # 2) :VERSION_SPLIT (e.g. 2)
            echo "IMAGE_TAGS=ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.VERSION }},ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.VERSION_SPLIT }}" >> $GITHUB_ENV
          else
            echo "IMAGE_TAG=devel" >> $GITHUB_ENV
            # Only the :devel tag
            echo "IMAGE_TAGS=ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:devel" >> $GITHUB_ENV
          fi
      - name: Set Platforms
        id: set_platforms
        run: |
          NO_ARM64_DIRS="MSDefenderOffice365"
          CURRENT_DIR="${{ matrix.directory }}"
          # Default to multi-arch
          PLATFORMS="linux/amd64,linux/arm64"
          # Check if CURRENT_DIR is in the NO_ARM64_DIRS list
          if echo "$NO_ARM64_DIRS" | grep -qw "$CURRENT_DIR"; then
            echo "Directory '$CURRENT_DIR' is in NO_ARM64_DIRS; limiting to linux/amd64 only."
            PLATFORMS="linux/amd64"
          fi
          echo "PLATFORMS=$PLATFORMS" >> $GITHUB_ENV
      - name: Build and push multi-arch image to GHCR
        if: steps.check-rebuild.outputs.rebuild == 'true'
        uses: docker/build-push-action@v6
        with:
          context: responders/${{ matrix.directory }}
          file: ./responders/${{ matrix.directory }}/Dockerfile
          platforms: ${{ env.PLATFORMS }}
          push: true
          tags: ${{ env.IMAGE_TAGS }}
          labels: |
            org.opencontainers.image.created=${{ steps.build_date.outputs.date }}
            org.opencontainers.image.title=${{ env.LOWERCASE_NAME }}
            org.opencontainers.image.description=${{ env.DESCRIPTION }}
            org.opencontainers.image.url=https://thehive-project.org
            org.opencontainers.image.source=https://github.com/TheHive-Project/Cortex-Analyzers
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.vendor=TheHive Project
            org.opencontainers.image.version=${{ env.VERSION }}
          annotations: |
            org.opencontainers.image.description=${{ env.DESCRIPTION }}
            org.opencontainers.image.source=https://github.com/${{ github.repository }}
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.title=${{ env.LOWERCASE_NAME }}
            org.opencontainers.image.url=https://thehive-project.org
            org.opencontainers.image.version=${{ env.VERSION }}
      - name: Scan image for vulnerabilities (Trivy)
        uses: aquasecurity/trivy-action@0.32.0
        with:
          image-ref: ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.IMAGE_TAG }}
          format: table
          vuln-type: 'os,library'
          severity: 'CRITICAL,HIGH'
          exit-code: 0
          ignore-unfixed: true
      - name: Test imports in the container (amd64)
        if: ${{ steps.check-rebuild.outputs.rebuild == 'true' && contains(env.PLATFORMS, 'linux/amd64') }}
        run: |
          python <<EOF
          import subprocess
          import sys
          import textwrap
          image_tag = "ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.IMAGE_TAG }}"
          command = "${{ env.COMMAND }}"
          worker_name = "${{ matrix.directory }}"
          print(f"🔍 Testing (amd64) Python imports in built image '{image_tag}' for worker '{worker_name}'...")
          # In-container python snippet (AST-based import checking):
          test_code = textwrap.dedent(f'''
              import os, sys, ast, os.path as osp
              entrypoint_full = "{command}"
              fallback_dir = "{worker_name}"
              if "/" in entrypoint_full:
                  dir_part = osp.dirname(entrypoint_full)
                  file_part = osp.basename(entrypoint_full)
                  # If the directory doesn't exist but fallback_dir does, change to the fallback.
                  if not osp.isdir(dir_part) and osp.isdir(fallback_dir):
                      dir_part = fallback_dir
                  os.chdir(osp.join(os.getcwd(), dir_part))
                  entrypoint = file_part
              else:
                  entrypoint = entrypoint_full
              if not osp.exists(entrypoint):
                  print("❌ ERROR: {{}} not found inside the container.".format(entrypoint))
                  sys.exit(1)
              with open(entrypoint, 'r', encoding='utf-8') as f:
                  source = f.read()
              try:
                  tree = ast.parse(source)
              except SyntaxError as e:
                  print(f"❌ Syntax error in {{entrypoint}}: {{e}}")
                  sys.exit(1)
              imports = []
              for node in ast.walk(tree):
                  if isinstance(node, ast.Import):
                      for alias in node.names:
                          imports.append(alias.name)
                  elif isinstance(node, ast.ImportFrom):
                      if node.module:
                          imports.append(node.module)
              print("🔍 Checking Python imports from", entrypoint)
              for mod in set(imports):
                  try:
                      __import__(mod)
                      print(f"✅ {{mod}} - SUCCESS")
                  except Exception as e:
                      print(f"❌ {{mod}} - FAILED: {{e}}")
                      sys.exit(1)
              print("✅ All imports tested successfully!")
          ''')
          try:
              # Pull the amd64 variant explicitly
              pull_result = subprocess.run(
                  ["docker", "pull", "--platform", "linux/amd64", image_tag],
                  capture_output=True,
                  text=True
              )
              print(pull_result.stdout, file=sys.stdout)
              print(pull_result.stderr, file=sys.stderr)
              # Run the container with the Python test code
              result = subprocess.run(
                  [
                      "docker", "run", "--rm",
                      "--platform", "linux/amd64",
                      "--entrypoint", "python",
                      image_tag,
                      "-c", test_code
                  ],
                  capture_output=True,
                  text=True
              )
              # Print container logs
              print(result.stdout, file=sys.stdout)
              print(result.stderr, file=sys.stderr)
              if result.returncode != 0:
                  warning_message = f"Import testing FAILED (amd64) for worker '{worker_name}' with exit code {result.returncode}"
                  print("⚠️", warning_message)
                  print(f"::warning::{warning_message}")
              else:
                  print("✅ Import testing succeeded (amd64)")
          except Exception as e:
              print("::warning::Error during import testing (amd64):", e)
              sys.exit(1)
          EOF
      - name: Test imports in the container (arm64)
        if: ${{ steps.check-rebuild.outputs.rebuild == 'true' && contains(env.PLATFORMS, 'linux/arm64') }}
        run: |
          python <<EOF
          import subprocess
          import sys
          import textwrap
          image_tag = "ghcr.io/${{ env.LOWER_REPO_OWNER }}/${{ env.LOWERCASE_NAME }}:${{ env.IMAGE_TAG }}"
          command = "${{ env.COMMAND }}"
          worker_name = "${{ matrix.directory }}"
          print(f"🔍 Testing (arm64) Python imports in built image '{image_tag}' for worker '{worker_name}'...")
          # In-container python snippet (AST-based import checking):
          test_code = textwrap.dedent(f'''
              import os, sys, ast, os.path as osp
              entrypoint_full = "{command}"
              fallback_dir = "{worker_name}"
              if "/" in entrypoint_full:
                  dir_part = osp.dirname(entrypoint_full)
                  file_part = osp.basename(entrypoint_full)
                  # If the directory doesn't exist but fallback_dir does, change to the fallback.
                  if not osp.isdir(dir_part) and osp.isdir(fallback_dir):
                      dir_part = fallback_dir
                  os.chdir(osp.join(os.getcwd(), dir_part))
                  entrypoint = file_part
              else:
                  entrypoint = entrypoint_full
              if not osp.exists(entrypoint):
                  print("❌ ERROR: {{}} not found inside the container.".format(entrypoint))
                  sys.exit(1)
              with open(entrypoint, 'r', encoding='utf-8') as f:
                  source = f.read()
              try:
                  tree = ast.parse(source)
              except SyntaxError as e:
                  print(f"❌ Syntax error in {{entrypoint}}: {{e}}")
                  sys.exit(1)
              imports = []
              for node in ast.walk(tree):
                  if isinstance(node, ast.Import):
                      for alias in node.names:
                          imports.append(alias.name)
                  elif isinstance(node, ast.ImportFrom):
                      if node.module:
                          imports.append(node.module)
              print("🔍 Checking Python imports from", entrypoint)
              for mod in set(imports):
                  try:
                      __import__(mod)
                      print(f"✅ {{mod}} - SUCCESS")
                  except Exception as e:
                      print(f"❌ {{mod}} - FAILED: {{e}}")
                      sys.exit(1)
              print("✅ All imports tested successfully!")
          ''')
          try:
              # Pull the arm64 variant explicitly
              pull_result = subprocess.run(
                  ["docker", "pull", "--platform", "linux/arm64", image_tag],
                  capture_output=True,
                  text=True
              )
              print(pull_result.stdout, file=sys.stdout)
              print(pull_result.stderr, file=sys.stderr)
              # Run the container with the Python test code
              result = subprocess.run(
                  [
                      "docker", "run", "--rm",
                      "--platform", "linux/arm64",
                      "--entrypoint", "python",
                      image_tag,
                      "-c", test_code
                  ],
                  capture_output=True,
                  text=True
              )
              # Print container logs
              print(result.stdout, file=sys.stdout)
              print(result.stderr, file=sys.stderr)
              if result.returncode != 0:
                  warning_message = f"Import testing FAILED (arm64) for worker '{worker_name}' with exit code {result.returncode}"
                  print("⚠️", warning_message)
                  print(f"::warning::{warning_message}")
              else:
                  print("✅ Import testing succeeded (arm64)")
          except Exception as e:
              print("::warning::Error during import testing (arm64):", e)
              sys.exit(1)
          EOF
  build_catalog:
    name: Build Catalog
    runs-on: ubuntu-latest
    # needs: [ build_responders ]
    needs: [ build_analyzers, build_responders ]
    if: always()
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Set lowercase repository owner
        run: |
          owner="${{ github.repository_owner }}"
          lower_owner=$(echo "$owner" | tr '[:upper:]' '[:lower:]')
          echo "LOWER_REPO_OWNER=$lower_owner" >> $GITHUB_ENV
      - name: Build catalog JSON files
        run: |
          build_catalog() {
            DIR=$1
            jq -s '[.[] | del(.command) + { dockerImage: ("ghcr.io/${{ env.LOWER_REPO_OWNER }}/" + (.name | ascii_downcase) + ":devel") }]' \
              ${DIR}/*/*.json > ${DIR}/${DIR}-devel.json
            jq -s '[.[] | del(.command) + { dockerImage: ("ghcr.io/${{ env.LOWER_REPO_OWNER }}/" + (.name | ascii_downcase) + ":" + .version) }]' \
              ${DIR}/*/*.json > ${DIR}/${DIR}-stable.json
            jq -s '[.[] | del(.command) + { dockerImage: ("ghcr.io/${{ env.LOWER_REPO_OWNER }}/" + (.name | ascii_downcase) + ":" + (.version | split("."))[0]) }]' \
              ${DIR}/*/*.json > ${DIR}/${DIR}.json
          }
          build_catalog analyzers
          build_catalog responders
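      # Illustrative example (hypothetical flavor): {"name": "Foo", "version": "2.0", ...}
      # produces a stable entry with "dockerImage": "ghcr.io/<owner>/foo:2.0",
      # a default entry pinned to the major tag :2, and a devel entry pinned to
      # :devel, each with the command field removed.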
      - name: Zip report-templates
        run: zip -r ../analyzers/report-templates.zip *
        working-directory: thehive-templates
      - name: Save Artifacts
        uses: actions/upload-artifact@v4
        with:
          name: catalog
          path: |
            analyzers/analyzers.json
            analyzers/analyzers-devel.json
            analyzers/analyzers-stable.json
            analyzers/report-templates.zip
            responders/responders.json
            responders/responders-devel.json
            responders/responders-stable.json
      - name: Make Release
        uses: softprops/action-gh-release@v2
        if: startsWith(github.ref, 'refs/tags/')
        with:
          generate_release_notes: true
          files: |
            analyzers/analyzers-stable.json
            analyzers/analyzers.json
            analyzers/report-templates.zip
            responders/responders-stable.json
            responders/responders.json
  build_docs:
    name: Build documentation
    runs-on: ubuntu-latest
    # needs: [ build_responders ]
    needs: [ build_analyzers, build_responders ]
    if: startsWith(github.ref, 'refs/tags/') && always()
    steps:
      - uses: actions/checkout@v4
      - name: Prepare documentation files
        uses: docker://thehiveproject/doc-builder
        with:
          args: --type Cortex-Neurons
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"
          architecture: x64
      - name: Install requirements
        run: python3 -m pip install -r utils/test_doc/requirements.txt
      - name: Set up git user
        run: |
          git config user.name 'github-actions[bot]'
          git config user.email 'github-actions[bot]@users.noreply.github.com'
      - name: Deploy documentation
        run: python3 -m mkdocs gh-deploy --remote-branch gh-pages --force
  notify:
    name: Notify
    # needs: [ build_responders, build_catalog, build_docs ]
    needs: [ build_analyzers, build_responders, build_catalog, build_docs ]
    runs-on: ubuntu-latest
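    # Note: a bare `if: true` still gets the implicit success() check applied,
    # so this job is skipped when an upstream job fails; `if: always()` would
    # be needed to notify on failures as well.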
    if: true
    steps:
      - name: Slack notification
        uses: Gamesight/slack-workflow-status@master
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
          channel: "#ci-cortex"
          name: Cortex Analyzers build
          include_commit_message: true
          include_jobs: true