#!/bin/bash
#
# Collect per-scenario e2e JSON metrics files and submit them to Datadog.
#
# Usage: send_metrics.sh <job> <instance> [metrics_dir] [datadog_api_key]

set -euo pipefail

# Positional arguments; defaults are empty so the guard below can catch
# missing required values (the pasted original's `${1:- }` defaulted to a
# literal space, which made the -z checks unreachable).
JOB=${1:-}
INSTANCE=${2:-}
METRICS_DIR=${3:-metrics}
DATADOG_API_KEY=${4:-}

if [[ -z "$JOB" || -z "$INSTANCE" ]]; then
  echo "Usage: $0 <job> <instance> [metrics_dir] [datadog_api_key]"
  echo "  job             - GitHub workflow name"
  echo "  instance        - GitHub run ID"
  echo "  metrics_dir     - (optional) Directory containing JSON metrics files (default: metrics)"
  echo "  datadog_api_key - (optional) Datadog API key for direct sending"
  exit 1
fi

# A missing metrics directory is not an error: there is simply nothing to send.
if [[ ! -d "$METRICS_DIR" ]]; then
  echo "Metrics directory '$METRICS_DIR' does not exist."
  exit 0
fi
# Gather candidate metrics files. nullglob makes an unmatched glob expand to
# nothing instead of the literal pattern, so the emptiness check below works.
shopt -s nullglob
FILES=("$METRICS_DIR"/*.json)

# No files is a clean no-op, not a failure.
if [[ ${#FILES[@]} -eq 0 ]]; then
  echo "No JSON files found in '$METRICS_DIR'."
  exit 0
fi
# Prepare Datadog payload
TIMESTAMP=$(date +%s)
SERIES=()

# normalize_timestamp RAW: print a Unix timestamp in seconds.
# 10-digit input is treated as seconds, 13-digit as milliseconds; any other
# format falls back to the script start time ($TIMESTAMP).
normalize_timestamp() {
  local raw=$1
  if [[ "$raw" =~ ^[0-9]{10}$ ]]; then
    echo "$raw"
  elif [[ "$raw" =~ ^[0-9]{13}$ ]]; then
    echo $(( raw / 1000 ))
  else
    echo "$TIMESTAMP"
  fi
}

# add_metric NAME TS VALUE: append one Datadog v2 series object to SERIES.
# Tag context comes from globals: scenario_name, scenario_id, JOB, INSTANCE.
add_metric() {
  local metric=$1 ts=$2 value=$3
  SERIES+=("{
    \"metric\": \"$metric\",
    \"points\": [{\"timestamp\": $ts, \"value\": $value}],
    \"tags\": [\"scenario_name:$scenario_name\", \"scenario_id:$scenario_id\", \"job:$JOB\", \"instance:$INSTANCE\"]
  }")
}

for file in "${FILES[@]}"; do
  if [[ ! -s "$file" ]]; then
    echo "Skipping empty file: $file"
    continue
  fi

  # Validate JSON before extracting fields.
  if ! jq empty "$file" 2>/dev/null; then
    echo "Warning: Invalid JSON in $file, skipping"
    continue
  fi

  filename=$(basename "$file")
  scenario_id="${filename%%-chrome-usage.json}"

  # JSON-escape the scenario name with jq. The first stage uses -j (not -r)
  # so no trailing newline is captured by -Rs — otherwise the escaped string
  # would end in a literal \n after the quotes are stripped.
  scenario_name=$(jq -j '.scenarioName // empty' "$file" | jq -Rs . | sed 's/^"//;s/"$//')

  data_length=$(jq '.data | length' "$file")

  echo "Processing $filename (scenario: $scenario_name, data points: $data_length)"

  if [[ "$data_length" -eq 0 ]]; then
    # No samples recorded — emit explicit zero datapoints so the scenario
    # still appears in Datadog.
    add_metric "lace.e2e.cpu.seconds_total" "$TIMESTAMP" 0
    add_metric "lace.e2e.memory.rss_bytes" "$TIMESTAMP" 0
  else
    # Process substitution (not a pipeline) keeps this loop in the current
    # shell, so SERIES+= is not lost to a subshell.
    while IFS= read -r entry; do
      timestamp=$(echo "$entry" | jq -r '.timestamp')
      cpu=$(echo "$entry" | jq -r '.cpu')
      memory=$(echo "$entry" | jq -r '.memory')

      unix_timestamp=$(normalize_timestamp "$timestamp")

      # Only accept plain non-negative decimal numbers.
      if [[ "$cpu" =~ ^[0-9]+\.?[0-9]*$ ]] && [[ "$memory" =~ ^[0-9]+\.?[0-9]*$ ]]; then
        add_metric "lace.e2e.cpu.seconds_total" "$unix_timestamp" "$cpu"
        add_metric "lace.e2e.memory.rss_bytes" "$unix_timestamp" "$memory"
      else
        echo "Warning: Invalid numeric values in $file (cpu: $cpu, memory: $memory)"
      fi
    done < <(jq -c '.data[]' "$file")
  fi
done
# Check if we have any metrics
if [[ ${#SERIES[@]} -eq 0 ]]; then
  echo "No valid metrics found to send"
  exit 0
fi

# Create the final payload: join the series objects with commas.
PAYLOAD="{
  \"series\": [$(IFS=,; echo "${SERIES[*]}")]
}"

echo "Generated Datadog payload with ${#SERIES[@]} metrics"

# Save payload to file for debugging
echo "$PAYLOAD" > "$METRICS_DIR/datadog_payload.json"
echo "Datadog payload saved to: $METRICS_DIR/datadog_payload.json"

# Send to Datadog if API key provided
if [[ -n "$DATADOG_API_KEY" ]]; then
  echo "Sending metrics to Datadog..."

  # Validate payload JSON before making the API call.
  if ! echo "$PAYLOAD" | jq empty 2>/dev/null; then
    echo "❌ ERROR: Invalid JSON payload generated"
    exit 1
  fi

  # NOTE: no trailing 's' after %{time_total} — the echo below appends the
  # unit, and the original format string produced output like "0.5ss".
  response=$(curl -s -w "\nHTTP_STATUS_CODE:%{http_code}\nTOTAL_TIME:%{time_total}\n" \
    -X POST "https://api.us5.datadoghq.com/api/v2/series" \
    -H "Content-Type: application/json" \
    -H "DD-API-KEY: $DATADOG_API_KEY" \
    -d "$PAYLOAD")

  # Split the curl write-out trailers back out of the response body.
  http_code=$(echo "$response" | grep "HTTP_STATUS_CODE:" | cut -d: -f2)
  total_time=$(echo "$response" | grep "TOTAL_TIME:" | cut -d: -f2)
  response_body=$(echo "$response" | sed '/HTTP_STATUS_CODE:/d' | sed '/TOTAL_TIME:/d')

  echo "Datadog API Response:"
  echo "Status: $http_code"
  echo "Time: ${total_time}s"
  echo "Body: $response_body"

  # The v2 series intake endpoint answers 202 Accepted on success.
  if [[ "$http_code" = "202" ]]; then
    echo "✅ SUCCESS: E2E metrics sent to Datadog!"
  else
    echo "❌ ERROR: Failed to send E2E metrics to Datadog"
    echo "Response: $response_body"
    exit 1
  fi
else
  echo "No Datadog API key provided, metrics saved to file only"
fi