Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
34 changes: 34 additions & 0 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,9 @@ jobs:
changed_node:
${{ needs.job_get_metadata.outputs.changed_ci == 'true' || contains(steps.checkForAffected.outputs.affected,
'@sentry/node') }}
changed_node_overhead_action:
${{ needs.job_get_metadata.outputs.changed_ci == 'true' || contains(steps.checkForAffected.outputs.affected,
'@sentry-internal/node-overhead-gh-action') }}
changed_deno:
${{ needs.job_get_metadata.outputs.changed_ci == 'true' || contains(steps.checkForAffected.outputs.affected,
'@sentry/deno') }}
Expand Down Expand Up @@ -253,6 +256,37 @@ jobs:
# Only run comparison against develop if this is a PR
comparison_branch: ${{ (github.event_name == 'pull_request' && github.base_ref) || ''}}

  # Measures the runtime overhead of the Sentry SDK in a Node app and (on PRs)
  # posts a comparison comment via the node-overhead-gh-action dev package.
  job_node_overhead_check:
    name: Node Overhead Check
    needs: [job_get_metadata, job_build]
    timeout-minutes: 15
    runs-on: ubuntu-24.04
    # Run when node SDK code or the action itself changed on a PR, and always
    # on the base branch / releases (to refresh the stored baseline artifact).
    if:
      (needs.job_build.outputs.changed_node == 'true' && github.event_name == 'pull_request') ||
      (needs.job_build.outputs.changed_node_overhead_action == 'true' && github.event_name == 'pull_request') ||
      needs.job_get_metadata.outputs.is_base_branch == 'true' || needs.job_get_metadata.outputs.is_release == 'true'
    steps:
      - name: Check out current commit (${{ needs.job_get_metadata.outputs.commit_label }})
        uses: actions/checkout@v4
        with:
          ref: ${{ env.HEAD_COMMIT }}
      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version-file: 'package.json'
      - name: Restore caches
        uses: ./.github/actions/restore-cache
        with:
          dependency_cache_key: ${{ needs.job_build.outputs.dependency_cache_key }}
      - name: Check node overhead
        uses: ./dev-packages/node-overhead-gh-action
        env:
          DEBUG: '1'
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          # Only run comparison against develop if this is a PR
          comparison_branch: ${{ (github.event_name == 'pull_request' && github.base_ref) || ''}}

job_lint:
name: Lint
# Even though the linter only checks source code, not built code, it needs the built code in order check that all
Expand Down
15 changes: 15 additions & 0 deletions dev-packages/node-overhead-gh-action/.eslintrc.cjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
// ESLint configuration for the node-overhead-gh-action dev package.
module.exports = {
  env: {
    node: true,
  },
  // Inherit the repository-wide ESLint rules.
  extends: ['../../.eslintrc.js'],
  overrides: [
    {
      // The action source files are ES modules (.mjs); give the parser the
      // local tsconfig so type-aware rules can resolve them.
      files: ['**/*.mjs'],
      parserOptions: {
        project: ['tsconfig.json'],
        sourceType: 'module',
      },
    },
  ],
};
3 changes: 3 additions & 0 deletions dev-packages/node-overhead-gh-action/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# node-overhead-gh-action

Capture the performance overhead of the Sentry SDK in a Node.js app.
17 changes: 17 additions & 0 deletions dev-packages/node-overhead-gh-action/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Composite action metadata: runs the node overhead benchmark and, on PRs,
# posts/updates a comparison comment against the comparison branch.
name: 'node-overhead-gh-action'
description: 'Run node overhead comparison'
inputs:
  github_token:
    required: true
    description: 'a github access token'
  comparison_branch:
    required: false
    default: ''
    description: 'If set, compare the current branch with this branch'
  threshold:
    required: false
    default: '3'
    description: 'The percentage threshold for size changes before posting a comment'
runs:
  using: 'node24'
  main: 'index.mjs'
25 changes: 25 additions & 0 deletions dev-packages/node-overhead-gh-action/db/init/init.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
-- Initialization script for the benchmark MySQL container
-- (mounted into /docker-entrypoint-initdb.d, runs once on a fresh data dir).

CREATE DATABASE mydb;
-- Terminate with ';' — without it the mysql client would not execute the
-- statement and the following CREATE TABLE would land in the wrong schema.
USE mydb;

-- 1. Create the 'users' table
-- This table stores basic user information.
-- 'id' is the primary key and will automatically increment for each new record.
-- 'name' stores the user's name, up to 255 characters.
-- 'age' stores the user's age as an integer.

CREATE TABLE users (
  id INT PRIMARY KEY AUTO_INCREMENT,
  name VARCHAR(255) NOT NULL,
  age INT
);

-- 2. Insert 5 rows into the 'users' table
-- Populating the table with some sample data.

INSERT INTO users (name, age) VALUES ('Alice Johnson', 28);
INSERT INTO users (name, age) VALUES ('Bob Smith', 45);
INSERT INTO users (name, age) VALUES ('Charlie Brown', 32);
INSERT INTO users (name, age) VALUES ('Diana Prince', 25);
INSERT INTO users (name, age) VALUES ('Ethan Hunt', 41);
12 changes: 12 additions & 0 deletions dev-packages/node-overhead-gh-action/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Local MySQL instance used by the overhead benchmark scenarios.
services:
  db:
    image: mysql:8
    restart: always
    container_name: node-overhead-gh-action-mysql
    ports:
      - '3306:3306'
    environment:
      MYSQL_ROOT_PASSWORD: password
    volumes:
      # Data is intentionally not persisted so ./db/init re-runs on each
      # fresh container (init scripts only run on an empty data dir).
      # - ./db/data:/var/lib/mysql
      - ./db/init:/docker-entrypoint-initdb.d/:ro
236 changes: 236 additions & 0 deletions dev-packages/node-overhead-gh-action/index.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,236 @@
import { promises as fs } from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { DefaultArtifactClient } from '@actions/artifact';
import * as core from '@actions/core';
import { exec } from '@actions/exec';
import { context, getOctokit } from '@actions/github';
import * as glob from '@actions/glob';
import * as io from '@actions/io';
import { markdownTable } from 'markdown-table';
import { getArtifactsForBranchAndWorkflow } from './lib/getArtifactsForBranchAndWorkflow.mjs';
import { getAveragedOverheadMeasurements } from './lib/getOverheadMeasurements.mjs';
import { formatResults, hasChanges } from './lib/markdown-table-formatter.mjs';

const NODE_OVERHEAD_HEADING = '## node-overhead report 🧳';
const ARTIFACT_NAME = 'node-overhead-action';
const RESULTS_FILE = 'node-overhead-results.json';

/**
 * Absolute path of the JSON results file, which lives next to this script.
 *
 * @returns {string} Absolute path to the results file.
 */
function getResultsFilePath() {
  const scriptDir = path.dirname(fileURLToPath(import.meta.url));
  return path.join(scriptDir, RESULTS_FILE);
}

// Convenience aliases for the @actions/core helpers used below.
const { getInput, setFailed } = core;

/**
 * Find the existing node-overhead comment on the PR, if any, so it can be
 * updated in place instead of posting a duplicate.
 *
 * @param {ReturnType<import('@actions/github').getOctokit>} octokit Authenticated Octokit client.
 * @param {{owner: string, repo: string}} repo Repository coordinates from the workflow context.
 * @param {{number: number}} pr Pull request payload (only `number` is read).
 * @returns {Promise<object | undefined>} The matching comment, or undefined if none exists.
 */
async function fetchPreviousComment(octokit, repo, pr) {
  const { data: commentList } = await octokit.rest.issues.listComments({
    ...repo,
    issue_number: pr.number,
    // Default page size is 30 — fetch the maximum so the marker comment is
    // still found on busy PRs.
    per_page: 100,
  });

  // `body` can be absent on some comment payloads; guard with `?.` so a
  // body-less comment doesn't crash the whole action.
  return commentList.find(comment => comment.body?.startsWith(NODE_OVERHEAD_HEADING));
}

/**
 * Entry point of the action.
 *
 * Behavior depends on `comparison_branch`:
 *  - unset: measure overhead on the current ref and upload the results as a
 *    workflow artifact (baseline run, see runNodeOverheadOnComparisonBranch).
 *  - set (PR runs): download the baseline artifact from the comparison
 *    branch, measure the current branch, and post/update a PR comment when
 *    the results changed beyond the threshold.
 *
 * Errors are reported via core.setFailed rather than thrown.
 */
async function run() {
  const __dirname = path.dirname(fileURLToPath(import.meta.url));

  try {
    const { payload, repo } = context;
    const pr = payload.pull_request;

    const comparisonBranch = getInput('comparison_branch');
    const githubToken = getInput('github_token');
    // NOTE(review): fallback here is 1, while action.yml declares a default
    // of '3' — the `|| 1` only applies when the input is explicitly empty.
    // Confirm which default is intended.
    const threshold = getInput('threshold') || 1;

    if (comparisonBranch && !pr) {
      throw new Error('No PR found. Only pull_request workflows are supported.');
    }

    const octokit = getOctokit(githubToken);
    const resultsFilePath = getResultsFilePath();

    // If we have no comparison branch, we just run overhead check & store the result as artifact
    if (!comparisonBranch) {
      return runNodeOverheadOnComparisonBranch();
    }

    // Else, we run overhead check for the current branch, AND fetch it for the comparison branch
    let base;
    let current;
    let baseIsNotLatest = false;
    let baseWorkflowRun;

    try {
      const workflowName = `${process.env.GITHUB_WORKFLOW || ''}`;
      core.startGroup(`getArtifactsForBranchAndWorkflow - workflow:"${workflowName}", branch:"${comparisonBranch}"`);
      const artifacts = await getArtifactsForBranchAndWorkflow(octokit, {
        ...repo,
        artifactName: ARTIFACT_NAME,
        branch: comparisonBranch,
        workflowName,
      });
      core.endGroup();

      if (!artifacts) {
        throw new Error('No artifacts found');
      }

      baseWorkflowRun = artifacts.workflowRun;

      // Download the baseline results next to this script, then read them in.
      await downloadOtherWorkflowArtifact(octokit, {
        ...repo,
        artifactName: ARTIFACT_NAME,
        artifactId: artifacts.artifact.id,
        downloadPath: __dirname,
      });

      base = JSON.parse(await fs.readFile(resultsFilePath, { encoding: 'utf8' }));

      if (!artifacts.isLatest) {
        baseIsNotLatest = true;
        core.info('Base artifact is not the latest one. This may lead to incorrect results.');
      }
    } catch (error) {
      // A missing baseline is not fatal: the report is rendered without a
      // comparison instead.
      core.startGroup('Warning, unable to find base results');
      core.error(error);
      core.endGroup();
    }

    core.startGroup('Getting current overhead measurements');
    try {
      current = await getAveragedOverheadMeasurements();
    } catch (error) {
      core.error('Error getting current overhead measurements');
      core.endGroup();
      throw error;
    }
    core.debug(`Current overhead measurements: ${JSON.stringify(current, null, 2)}`);
    core.endGroup();

    const thresholdNumber = Number(threshold);

    const nodeOverheadComment = await fetchPreviousComment(octokit, repo, pr);

    if (nodeOverheadComment) {
      core.debug('Found existing node overhead comment, updating it instead of creating a new one...');
    }

    // Comment when the threshold is unparsable, when the measurements changed
    // beyond the threshold, or when a previous comment exists (keep it fresh).
    const shouldComment = isNaN(thresholdNumber) || hasChanges(base, current, thresholdNumber) || nodeOverheadComment;

    if (shouldComment) {
      const bodyParts = [
        NODE_OVERHEAD_HEADING,
        'Note: This is a synthetic benchmark with a minimal express app and does not necessarily reflect the real-world performance impact in an application.',
      ];

      if (baseIsNotLatest) {
        bodyParts.push(
          '⚠️ **Warning:** Base artifact is not the latest one, because the latest workflow run is not done yet. This may lead to incorrect results. Try to re-run all tests to get up to date results.',
        );
      }
      try {
        bodyParts.push(markdownTable(formatResults(base, current)));
      } catch (error) {
        core.error('Error generating markdown table');
        throw error;
      }

      if (baseWorkflowRun) {
        bodyParts.push('');
        bodyParts.push(`[View base workflow run](${baseWorkflowRun.html_url})`);
      }

      const body = bodyParts.join('\r\n');

      try {
        if (!nodeOverheadComment) {
          await octokit.rest.issues.createComment({
            ...repo,
            issue_number: pr.number,
            body,
          });
        } else {
          await octokit.rest.issues.updateComment({
            ...repo,
            comment_id: nodeOverheadComment.id,
            body,
          });
        }
      } catch (error) {
        // Forked PRs run with a read-only token, so commenting may fail;
        // treat that as non-fatal.
        core.error(
          "Error updating comment. This can happen for PR's originating from a fork without write permissions.",
        );
      }
    } else {
      core.debug('Skipping comment because there are no changes.');
    }
  } catch (error) {
    core.error(error);
    setFailed(error.message);
  }
}

/**
 * Baseline mode: measure overhead on the current ref, write the results JSON
 * next to this script, and upload it as a workflow artifact so PR runs can
 * download and compare against it.
 *
 * @returns {Promise<void>}
 */
async function runNodeOverheadOnComparisonBranch() {
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
  const resultsFilePath = getResultsFilePath();

  const artifactClient = new DefaultArtifactClient();

  const result = await getAveragedOverheadMeasurements();

  try {
    await fs.writeFile(resultsFilePath, JSON.stringify(result), 'utf8');
  } catch (error) {
    // The try block guards writing the results file to disk (the previous
    // message incorrectly talked about parsing output).
    core.error(`Error writing node overhead results to ${resultsFilePath}.`);
    throw error;
  }

  // Resolve the file through a globber because uploadArtifact expects a list
  // of concrete file paths.
  const globber = await glob.create(resultsFilePath, {
    followSymbolicLinks: false,
  });
  const files = await globber.glob();

  await artifactClient.uploadArtifact(ARTIFACT_NAME, files, __dirname);
}

run();

/**
* Use GitHub API to fetch artifact download url, then
* download and extract artifact to `downloadPath`
*/
/**
 * Use GitHub API to fetch artifact download url, then
 * download and extract artifact to `downloadPath`
 */
async function downloadOtherWorkflowArtifact(octokit, { owner, repo, artifactId, artifactName, downloadPath }) {
  // Resolve the artifact's download URL (the API responds with a redirect,
  // whose target ends up on `artifact.url`).
  const artifact = await octokit.rest.actions.downloadArtifact({
    owner,
    repo,
    artifact_id: artifactId,
    archive_format: 'zip',
  });

  // Make sure output path exists
  try {
    await io.mkdirP(downloadPath);
  } catch {
    // ignore errors
  }

  const zipFile = path.resolve(downloadPath, `${artifactName}.zip`);

  // Download quietly with aggressive retries (-t 0 = retry indefinitely).
  const wgetArgs = [
    '-nv',
    '--retry-connrefused',
    '--waitretry=1',
    '--read-timeout=20',
    '--timeout=15',
    '-t',
    '0',
    '-O',
    zipFile,
    artifact.url,
  ];
  await exec('wget', wgetArgs);

  // Extract the archive into the download directory.
  await exec('unzip', ['-q', '-d', downloadPath, zipFile], {
    silent: true,
  });
}
Loading
Loading