Merged
51 changes: 39 additions & 12 deletions package-lock.json

Some generated files are not rendered by default.

3 changes: 1 addition & 2 deletions package.json
@@ -15,10 +15,9 @@
"sentry-cli": "bin/sentry-cli"
},
"dependencies": {
"https-proxy-agent": "^5.0.0",
"node-fetch": "^2.6.7",
"progress": "^2.0.3",
"proxy-from-env": "^1.1.0",
"undici": "^6.22.0",
"which": "^2.0.2"
},
"devDependencies": {
51 changes: 22 additions & 29 deletions scripts/install.js
@@ -6,12 +6,10 @@ const fs = require('fs');
const os = require('os');
const path = require('path');
const crypto = require('crypto');
const zlib = require('zlib');
const stream = require('stream');
const process = require('process');

const fetch = require('node-fetch');
const HttpsProxyAgent = require('https-proxy-agent');
const { ProxyAgent, fetch } = require('undici');
const ProgressBar = require('progress');
const Proxy = require('proxy-from-env');
const which = require('which');
@@ -89,7 +87,7 @@ function getDownloadUrl(platform, arch) {
}

function createProgressBar(name, total) {
const incorrectTotal = typeof total !== 'number' || Number.isNaN(total);
const incorrectTotal = typeof total !== 'number' || Number.isNaN(total) || total <= 0;

if (incorrectTotal || !shouldRenderProgressBar()) {
return {
@@ -220,7 +218,7 @@ async function downloadBinary() {
}

const proxyUrl = Proxy.getProxyForUrl(downloadUrl);
const agent = proxyUrl ? new HttpsProxyAgent(proxyUrl) : null;
const dispatcher = proxyUrl ? new ProxyAgent(proxyUrl) : undefined;

logger.log(`Downloading from ${downloadUrl}`);

@@ -231,12 +229,8 @@
let response;
try {
response = await fetch(downloadUrl, {
agent,
compress: false,
headers: {
'accept-encoding': 'gzip, deflate, br',
},
redirect: 'follow',
dispatcher,
});
} catch (error) {
let errorMsg = `Unable to download sentry-cli binary from ${downloadUrl}.\nError message: ${error.message}`;
@@ -254,39 +248,38 @@
throw new Error(errorMsg);
}

const contentEncoding = response.headers.get('content-encoding');
let decompressor;
if (/\bgzip\b/.test(contentEncoding)) {
decompressor = zlib.createGunzip();
} else if (/\bdeflate\b/.test(contentEncoding)) {
decompressor = zlib.createInflate();
} else if (/\bbr\b/.test(contentEncoding)) {
decompressor = zlib.createBrotliDecompress();
} else {
decompressor = new stream.PassThrough();
}
const name = downloadUrl.match(/.*\/(.*?)$/)[1];
let downloadedBytes = 0;
const totalBytes = parseInt(response.headers.get('content-length'), 10);

// Note: content-length might not be available if response was compressed,
// as native fetch decompresses transparently
const contentLength = response.headers.get('content-length');
const totalBytes = contentLength ? parseInt(contentLength, 10) : 0;
const progressBar = createProgressBar(name, totalBytes);
const tempPath = getTempFile(cachedPath);
fs.mkdirSync(path.dirname(tempPath), { recursive: true });

await new Promise((resolve, reject) => {
response.body
// Convert Web ReadableStream to Node.js stream
const nodeStream = stream.Readable.fromWeb(response.body);

nodeStream
.on('error', (e) => reject(e))
.on('data', (chunk) => {
downloadedBytes += chunk.length;
progressBar.tick(chunk.length);

if (!progressBar.complete) {
progressBar.tick(chunk.length);
}
})
.pipe(decompressor)
.pipe(fs.createWriteStream(tempPath, { mode: '0755' }))
.on('error', (e) => reject(e))
.on('close', () => {
if (downloadedBytes >= totalBytes) {
resolve();
} else {
.on('finish', () => {
// Check if we have a total size to validate against
if (totalBytes > 0 && downloadedBytes < totalBytes) {
reject(new Error('connection interrupted'));
} else {
resolve();
}
});

Bug: Download integrity check unreliable with compressed responses

The connection-interrupted check compares `downloadedBytes` against `totalBytes` to detect truncated downloads. However, with undici's transparent decompression, `downloadedBytes` counts decompressed bytes while `totalBytes` may still be the compressed `content-length` (undici preserves this header). If the connection is interrupted mid-transfer but enough compressed data was already received to decompress to more bytes than the compressed size, the check `downloadedBytes < totalBytes` evaluates to false, and the download is incorrectly accepted as complete.

The old code avoided this by setting `compress: false` and tracking compressed bytes. Checksum validation provides a safety net, but users who set `SENTRYCLI_SKIP_CHECKSUM_VALIDATION=1` lose protection against corrupted downloads in this scenario.
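
One possible mitigation, shown here as a minimal sketch rather than the PR's actual fix, is to enforce the byte-count comparison only when the response carried no `content-encoding` header, since only then do the bytes written to disk correspond to the advertised `content-length`. The helper below is illustrative, not code from the repository:

```js
const fs = require('fs');
const stream = require('stream');
const { fetch } = require('undici');

// Illustrative helper (not part of the PR): download a URL to destPath and
// only treat a short byte count as "connection interrupted" when the body
// was not transparently decompressed by fetch.
async function downloadTo(downloadUrl, destPath) {
  const response = await fetch(downloadUrl);

  const wasEncoded = Boolean(response.headers.get('content-encoding'));
  const contentLength = parseInt(response.headers.get('content-length'), 10) || 0;
  let downloadedBytes = 0;

  await new Promise((resolve, reject) => {
    stream.Readable.fromWeb(response.body)
      .on('error', reject)
      .on('data', (chunk) => {
        downloadedBytes += chunk.length;
      })
      .pipe(fs.createWriteStream(destPath, { mode: 0o755 }))
      .on('error', reject)
      .on('finish', () => {
        // The comparison is only meaningful when bytes on disk equal bytes on the wire.
        if (!wasEncoded && contentLength > 0 && downloadedBytes < contentLength) {
          reject(new Error('connection interrupted'));
        } else {
          resolve();
        }
      });
  });
}
```

With a compressed response this sketch simply skips the byte check and falls back to the checksum validation the installer already performs, unless `SENTRYCLI_SKIP_CHECKSUM_VALIDATION=1` is set.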


});
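
For reference, the proxy handling the updated script adopts reduces to roughly the following pattern, with undici's `ProxyAgent` used as a per-request dispatcher. This is a sketch of the general approach; the wrapper function is illustrative rather than the script's actual code:

```js
const { ProxyAgent, fetch } = require('undici');
const Proxy = require('proxy-from-env');

// Illustrative wrapper: route the request through a proxy only when
// HTTP_PROXY / HTTPS_PROXY / NO_PROXY indicate one applies to this URL.
async function fetchThroughProxy(downloadUrl) {
  const proxyUrl = Proxy.getProxyForUrl(downloadUrl);
  const dispatcher = proxyUrl ? new ProxyAgent(proxyUrl) : undefined;

  // undici's fetch follows redirects and decompresses the body by default,
  // which is why the old node-fetch options (compress, redirect) are gone.
  return fetch(downloadUrl, { dispatcher });
}
```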