Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
36 commits
Select commit Hold shift + click to select a range
c596bdf
test: add script to convert perf metrics to prometheus format
ljagiela May 20, 2025
97610f9
chore: add prom metrics uploader to e2e
bernokl May 20, 2025
893b218
chore: update http.server root
bernokl May 20, 2025
b2b9a3c
test: sub quote for single quote in dapp e2e
bernokl May 23, 2025
1f8f69f
test: escape double quotes for prometheus output
ljagiela May 23, 2025
f7602d4
Revert "test: sub quote for single quote in dapp e2e"
ljagiela May 23, 2025
bb043de
Merge branch 'main' into test/LW-12806_prometheus_script
ljagiela May 23, 2025
26a8d83
test: save timestamp in unix format
ljagiela May 23, 2025
692b146
chore: reduce sleep for prom to 20s
bernokl May 23, 2025
9397f95
chore: re-write metrics to test timestamp
bernokl May 23, 2025
f707bb7
chore: temporary file level sed of metrics
bernokl May 23, 2025
d5a917f
chore: strip timestamp
bernokl May 23, 2025
3a19c68
chore: move timestamp back to labels
bernokl May 30, 2025
0ca4a44
chore: set up grafana alloy
bernokl Jun 27, 2025
aacf504
chore: test datadog ci agent
bernokl Jul 29, 2025
70e2346
docs: minor update
bernokl Jul 29, 2025
b1d1553
Update Datadog CI workflow to trigger on test and feature branches
bernokl Jul 29, 2025
08a37c0
Fix Datadog CI workflow - use direct API calls instead of non-existen…
bernokl Jul 29, 2025
b9828eb
Fix workflow: correct catchpoint action version and add Datadog API d…
bernokl Jul 29, 2025
bbbd9da
Fix release-pkg job: use correct build/app action instead of non-exis…
bernokl Jul 29, 2025
d609376
test: enable datadog-ci workflow and add debugging tools
bernokl Jul 30, 2025
cf1521b
chore: change datadog endpoint
bernokl Jul 30, 2025
98a5d4c
chore: change datadog endpoint
bernokl Jul 30, 2025
83baac6
chore: add my branch to the run
bernokl Jul 30, 2025
60948df
chore: fix tests
bernokl Jul 30, 2025
7fc2964
Update Datadog API key and trigger workflow test
bernokl Jul 31, 2025
ebefcf1
chore: test datadog v2 ci agent
bernokl Aug 19, 2025
5a6a8f4
chore: minimum datadog test
bernokl Aug 21, 2025
53a36f7
chore: minimum datadog test in main
bernokl Aug 21, 2025
916a36e
chore: working datadog api
bernokl Aug 21, 2025
0c1b5b0
chore: add ci metrics to working test-datadog
bernokl Aug 21, 2025
351633a
chore: troubleshooting events
bernokl Aug 21, 2025
b6cbce7
chore: more metrics in test-datadog
bernokl Aug 21, 2025
c0429b0
docs: Add comprehensive CI dashboard
bernokl Aug 22, 2025
569f136
chore: add e2e mem/cpu metrics to DD
bernokl Aug 22, 2025
291cd76
chore: add e2e mem/cpu metrics to DD
bernokl Aug 22, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
560 changes: 560 additions & 0 deletions .github/workflows/ci-backup.yml

Large diffs are not rendered by default.

489 changes: 155 additions & 334 deletions .github/workflows/ci.yml

Large diffs are not rendered by default.

89 changes: 89 additions & 0 deletions .github/workflows/datadog-ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
name: Datadog CI Integration

# Reusable workflow that pushes CI pipeline metrics and a completion event
# to the Datadog v1 series/events APIs via plain curl.
on:
  workflow_call:
    inputs:
      # NOTE(review): an API key should be delivered through a `secrets:`
      # block, not `inputs:` — workflow_call string inputs are not masked in
      # logs. Kept as an input for backward compatibility with existing
      # callers; migrate to `secrets:` when callers can be updated.
      datadog-api-key:
        required: true
        type: string
        description: 'Datadog API key for CI metrics and logs'
      datadog-site:
        required: false
        type: string
        default: 'datadoghq.com'
        description: 'Datadog site (datadoghq.com, datadoghq.eu, etc.)'
      service-name:
        required: true
        type: string
        description: 'Service name for Datadog metrics'
      environment:
        required: false
        type: string
        default: 'ci'
        description: 'Environment name'
      tags:
        required: false
        type: string
        default: 'ci:true'
        description: 'Additional tags for metrics (comma-separated)'

jobs:
  # Emits trace/span identifiers (epoch seconds) that downstream jobs can
  # correlate on via this job's outputs.
  datadog-setup:
    runs-on: ubuntu-latest
    outputs:
      dd-trace-id: ${{ steps.datadog-setup.outputs.trace-id }}
      dd-span-id: ${{ steps.datadog-setup.outputs.span-id }}
    steps:
      - name: Setup Datadog CI
        id: datadog-setup
        run: |
          echo "trace-id=$(date +%s)" >> "$GITHUB_OUTPUT"
          echo "span-id=$(date +%s)" >> "$GITHUB_OUTPUT"

  datadog-monitor:
    runs-on: ubuntu-latest
    needs: datadog-setup
    if: always()
    # Shared by both steps; keeps the key out of the inlined curl commands.
    env:
      DD_API_KEY: ${{ inputs.datadog-api-key }}
      DD_SITE: ${{ inputs.datadog-site }}
    steps:
      - name: Send CI metrics to Datadog
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          # BUG FIX: the payload was previously wrapped in single quotes, so
          # the embedded $(date +%s) was sent literally (invalid JSON), and it
          # referenced the non-existent `github.run_duration` context (renders
          # empty, also breaking the JSON). Compute the timestamp and a real
          # elapsed-time-since-run-start here instead.
          NOW=$(date +%s)
          STARTED_AT=$(gh api "repos/${{ github.repository }}/actions/runs/${{ github.run_id }}" --jq '.run_started_at')
          DURATION=$(( NOW - $(date -d "$STARTED_AT" +%s) ))
          # Unquoted heredoc delimiter so $NOW / $DURATION expand inside JSON.
          curl -X POST "https://api.${DD_SITE}/api/v1/series" \
            -H "Content-Type: application/json" \
            -H "DD-API-KEY: ${DD_API_KEY}" \
            -d @- <<EOF
          {
            "series": [{
              "metric": "github.ci.pipeline.duration",
              "points": [[$NOW, $DURATION]],
              "tags": ["service:${{ inputs.service-name }}", "env:${{ inputs.environment }}", "workflow:${{ github.workflow }}", "job:${{ github.job }}"],
              "type": "gauge"
            }, {
              "metric": "github.ci.pipeline.status",
              "points": [[$NOW, 1]],
              "tags": ["service:${{ inputs.service-name }}", "env:${{ inputs.environment }}", "workflow:${{ github.workflow }}", "job:${{ github.job }}", "status:${{ job.status }}"],
              "type": "gauge"
            }, {
              "metric": "github.ci.repository.workflow_runs",
              "points": [[$NOW, 1]],
              "tags": ["service:${{ inputs.service-name }}", "env:${{ inputs.environment }}", "repo:${{ github.repository }}", "branch:${{ github.ref_name }}"],
              "type": "gauge"
            }, {
              "metric": "github.ci.job.duration",
              "points": [[$NOW, $DURATION]],
              "tags": ["service:${{ inputs.service-name }}", "env:${{ inputs.environment }}", "job:${{ github.job }}", "runner:${{ runner.os }}"],
              "type": "gauge"
            }]
          }
          EOF

      - name: Send CI events to Datadog
        if: always()
        run: |
          # Same single-quote fix as above: feed the payload via a heredoc.
          # NOTE(review): `job.status` here is the status of THIS monitoring
          # job, not of the pipeline being observed — confirm the intended
          # semantics with the dashboard owners.
          curl -X POST "https://api.${DD_SITE}/api/v1/events" \
            -H "Content-Type: application/json" \
            -H "DD-API-KEY: ${DD_API_KEY}" \
            -d @- <<EOF
          {
            "title": "Lace CI Pipeline: ${{ github.workflow }}",
            "text": "Workflow ${{ github.workflow }} completed with status: ${{ job.status }}\nRepository: ${{ github.repository }}\nBranch: ${{ github.ref_name }}\nCommit: ${{ github.sha }}",
            "tags": ["service:${{ inputs.service-name }}", "env:${{ inputs.environment }}", "workflow:${{ github.workflow }}", "status:${{ job.status }}", "repo:${{ github.repository }}"],
            "alert_type": "${{ job.status == 'success' && 'info' || 'error' }}",
            "source_type_name": "github"
          }
          EOF
82 changes: 82 additions & 0 deletions .github/workflows/e2e-tests-linux-split.yml
Original file line number Diff line number Diff line change
Expand Up @@ -193,6 +193,9 @@ jobs:
# when cancelling job always() will prevent step from being cancelled and we don't want process results in this case
if: ${{ success() || failure() }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

- name: Download all artifacts
uses: actions/download-artifact@v4
with:
Expand All @@ -212,6 +215,11 @@ jobs:
tags=${{ needs.setup.outputs.tags }}
" > environment.properties

- name: Convert metrics JSON to Datadog format
working-directory: './packages/e2e-tests/tools/'
run: |
bash convert_metrics_to_datadog.sh "${{ github.workflow }}" "${{ github.run_id }}" ../../../artifacts/metrics "${{ secrets.DATADOG_API_KEY }}"

- name: Publish allure report to S3
uses: andrcuns/[email protected]
env:
Expand Down Expand Up @@ -260,3 +268,77 @@ jobs:
with:
name: performance-metrics
path: ./artifacts/metrics

- name: Send E2E metrics to Datadog
run: |
echo "📊 E2E metrics have been sent to Datadog via the conversion script"
echo "📁 Datadog payload saved to: ./artifacts/metrics/datadog_payload.json"
echo "🔍 Check the previous step for detailed metrics information"

# ------------------------------------------------------------------------
# 3) Write Alloy (River) config — scrape localhost:9101 and remote_write
# ------------------------------------------------------------------------


# ------------------------------------------------------------------------
# 4) Launch Alloy for ~45 s to scrape + push the data, then shut it down.
# ------------------------------------------------------------------------


# - name: Create Prometheus config
# env:
# PUSH_URL: ${{ secrets.GRAFANA_PUSH_URL }} # e.g., https://prometheus-us-central1.grafana.net/api/prom/push
# PUSH_USER: ${{ secrets.GRAFANA_USERNAME }} # e.g., 787878
# PUSH_PASS: ${{ secrets.GRAFANA_PASSWORD }} # e.g., eyJrIjoxxxxxxxxxxxxxxyMX0=
# run: |
# cat >prometheus.yml <<EOF
# global:
# scrape_interval: 5s
# scrape_configs:
# - job_name: 'e2e-perf-metrics'
# static_configs:
# - targets: ['pushgateway:9091']
# remote_write:
# - url: "$PUSH_URL"
# basic_auth:
# username: "$PUSH_USER"
# password: "$PUSH_PASS"
# EOF
# pwd
# ls -al ./artifacts/metrics/prometheus.txt
# head ./artifacts/metrics/prometheus.txt
#
# - name: Start pushgateway server
# run: |
# docker run -d --name pushgateway -p 9091:9091 prom/pushgateway
# cat ./artifacts/metrics/prometheus.txt | curl --data-binary @- http://localhost:9091/metrics/job/e2e-perf
# #docker run -d --name http-server -v $(pwd):/app -w /app -p 8000:8000 python:3.11-slim python -m http.server 8000 > http.log 2>&1
# printf '\n*****************\n'
# echo 'This is my current directory:'
# pwd
# printf '\n*****************\n'
# echo 'This is ls -al in that directory'
# ls -al
# printf '\n*****************\n'
#
# - name: Run Prometheus
# run: |
# # Enable logs for debugging
# #docker run --name prometheus -v $(pwd):/etc/prometheus -p 9090:9090 --link pushgateway:pushgateway prom/prometheus:latest --config.file=/etc/prometheus/prometheus.yml --web.listen-address=:9090 --log.level=debug
# docker run -d --name prometheus -v $(pwd):/etc/prometheus -p 9090:9090 --link pushgateway:pushgateway prom/prometheus:latest --config.file=/etc/prometheus/prometheus.yml --web.listen-address=:9090 > prom.log 2>&1
# sleep 60
# # Uncomment for troubleshooting.
# printf '\n*****************\n'
# printf '\n curl e2e_cpu_seconds_total \n'
# curl http://localhost:9090/api/v1/query?query=e2e_cpu_seconds_total_v1
# printf '\n*****************\n'
# printf '\n cat prom.log \n'
# cat prom.log
# #printf '\n*****************\n'
# #printf '\n send_failures_total \n'
# #curl 'http://localhost:9090/api/v1/query?query=prometheus_remote_storage_queue_send_failures_total'
# #printf '\n*****************\n'
# #printf '\n storage_retries_total \n'
# #curl 'http://localhost:9090/api/v1/query?query=prometheus_remote_storage_retries_total'


Loading
Loading