diff --git a/.evergreen/config.in.yml b/.evergreen/config.in.yml index caa7f202e83..1a3adee09d2 100644 --- a/.evergreen/config.in.yml +++ b/.evergreen/config.in.yml @@ -108,6 +108,15 @@ functions: args: - .evergreen/run-tests.sh + "perf send": + - command: subprocess.exec + params: + working_dir: src + binary: bash + add_expansions_to_env: true + args: + - .evergreen/perf-send.sh + "run serverless tests": - command: timeout.update params: diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 27c6ab3d078..d9f3b3a1f73 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -80,6 +80,14 @@ functions: binary: bash args: - .evergreen/run-tests.sh + perf send: + - command: subprocess.exec + params: + working_dir: src + binary: bash + add_expansions_to_env: true + args: + - .evergreen/perf-send.sh run serverless tests: - command: timeout.update params: @@ -1891,9 +1899,7 @@ tasks: - func: install dependencies - func: bootstrap mongo-orchestration - func: run spec driver benchmarks - - command: perf.send - params: - file: src/test/benchmarks/driver_bench/results.json + - func: perf send - name: run-spec-benchmark-tests-node-server-timeoutMS-120000 tags: - run-spec-benchmark-tests @@ -1912,9 +1918,7 @@ tasks: - func: install dependencies - func: bootstrap mongo-orchestration - func: run spec driver benchmarks - - command: perf.send - params: - file: src/test/benchmarks/driver_bench/results.json + - func: perf send - name: run-spec-benchmark-tests-node-server-timeoutMS-0 tags: - run-spec-benchmark-tests @@ -1933,9 +1937,7 @@ tasks: - func: install dependencies - func: bootstrap mongo-orchestration - func: run spec driver benchmarks - - command: perf.send - params: - file: src/test/benchmarks/driver_bench/results.json + - func: perf send - name: run-spec-benchmark-tests-node-server-monitorCommands-true tags: - run-spec-benchmark-tests @@ -1954,9 +1956,7 @@ tasks: - func: install dependencies - func: bootstrap mongo-orchestration - func: run spec driver 
benchmarks - - command: perf.send - params: - file: src/test/benchmarks/driver_bench/results.json + - func: perf send - name: run-spec-benchmark-tests-node-server-logging tags: - run-spec-benchmark-tests @@ -1975,9 +1975,7 @@ tasks: - func: install dependencies - func: bootstrap mongo-orchestration - func: run spec driver benchmarks - - command: perf.send - params: - file: src/test/benchmarks/driver_bench/results.json + - func: perf send - name: run-unit-tests-node-16 tags: - unit-tests diff --git a/.evergreen/generate_evergreen_tasks.js b/.evergreen/generate_evergreen_tasks.js index b4ff97688ee..87ac59b9086 100644 --- a/.evergreen/generate_evergreen_tasks.js +++ b/.evergreen/generate_evergreen_tasks.js @@ -765,12 +765,9 @@ function addPerformanceTasks() { ...[ 'install dependencies', 'bootstrap mongo-orchestration', - 'run spec driver benchmarks' - ].map(func => ({ func })), - { - command: 'perf.send', - params: { file: 'src/test/benchmarks/driver_bench/results.json' } - } + 'run spec driver benchmarks', + 'perf send' + ].map(func => ({ func })) ] }); diff --git a/.evergreen/perf-send.sh b/.evergreen/perf-send.sh new file mode 100644 index 00000000000..a3c7f234b82 --- /dev/null +++ b/.evergreen/perf-send.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -euox pipefail + +source "$DRIVERS_TOOLS/.evergreen/init-node-and-npm-env.sh" + +TARGET_FILE=$(realpath "${TARGET_FILE:-./test/benchmarks/driver_bench/results.json}") + +node ./.evergreen/perf_send.mjs "$TARGET_FILE" diff --git a/.evergreen/perf_send.mjs b/.evergreen/perf_send.mjs new file mode 100644 index 00000000000..5a3379df419 --- /dev/null +++ b/.evergreen/perf_send.mjs @@ -0,0 +1,63 @@ +import fs from 'fs/promises'; +import util from 'util'; + +const API_PATH = 'https://performance-monitoring-api.corp.mongodb.com/raw_perf_results'; + +const resultFile = process.argv[2]; +if (resultFile == null) { + throw new Error('Must specify result file'); +} + +// Get expansions +const { + execution, + requester, + project, + 
task_id, + task_name, + revision_order_id, + build_variant: variant, + version_id: version +} = process.env; + +const orderSplit = revision_order_id?.split('_'); +const order = Number(orderSplit ? orderSplit[orderSplit.length - 1] : undefined); + +if (!Number.isInteger(order)) throw new Error(`Failed to parse integer from order, revision_order_id=${revision_order_id}`); + +const results = JSON.parse(await fs.readFile(resultFile, 'utf8')); + +const body = { + id: { + project, + version, + variant, + order, + task_name, + task_id, + execution, + mainline: requester === 'commit' + }, + results +}; + +const resp = await fetch(API_PATH, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + accept: 'application/json' + }, + body: JSON.stringify(body) +}); + +const responseText = await resp.text(); +let jsonResponse = null; +try { + jsonResponse = JSON.parse(responseText); +} catch (cause) { + console.log('Failed to parse json response', cause); +} + +console.log(resp.statusText, util.inspect(jsonResponse ?? responseText, { depth: Infinity })); + +if (jsonResponse?.message == null) throw new Error("Didn't get success message"); diff --git a/test/benchmarks/driver_bench/src/driver.mts b/test/benchmarks/driver_bench/src/driver.mts index 488d56a1afd..434c5b9d51b 100644 --- a/test/benchmarks/driver_bench/src/driver.mts +++ b/test/benchmarks/driver_bench/src/driver.mts @@ -145,6 +145,8 @@ export type MetricInfo = { test_name: string; args: Record; }; + created_at: string; + completed_at: string; metrics: Metric[]; }; @@ -161,6 +163,8 @@ export function metrics(test_name: string, result: number, tags?: string[]): Met ]) ) }, + created_at: new Date().toISOString(), + completed_at: new Date().toISOString(), // FIXME(NODE-6781): For now all of our metrics are of throughput so their improvement_direction is up, metrics: [ {