Skip to content

Commit e4e4b08

Browse files
committed
Fix memory leak by removing request.clone() usage
Identified that the memory leak in the project was linked to the usage of request.clone() within the `@sentry/remix` package's callRouteAction handler. Although initially suspected to be a Sentry issue, the problem appears to arise from the handling of request.clone() in Remix version 2.1.0. Removing the call to request.clone() resolved the memory leak.

- Introduced a garbage-collection run before each snapshot to manage memory allocation effectively.
- Improved error handling and timeout mechanisms in the memory leak detector to make it more resilient at runtime.
- Expanded testing to cover both GET and POST requests for better monitoring and validation of potential memory leaks; the POST requests send large payloads to stress-test the system.
- Focused in particular on robust memory tracking and detailed progress reporting during request phases.
1 parent a0cbbef commit e4e4b08

File tree

1 file changed

+119
-13
lines changed

1 file changed

+119
-13
lines changed

apps/webapp/memory-leak-detector.js

Lines changed: 119 additions & 13 deletions
Original file line number · Diff line number · Diff line change
@@ -44,6 +44,7 @@ class MemoryLeakDetector {
4444

4545
// API endpoints to test (configurable)
4646
apiEndpoints: options.apiEndpoints || ["/api/v1/runs"],
47+
postApiEndpoints: options.postApiEndpoints || ["/api/v1/mock"],
4748

4849
// Memory analysis thresholds
4950
memoryLeakThreshold: options.memoryLeakThreshold || 50, // MB increase
@@ -110,6 +111,9 @@ class MemoryLeakDetector {
110111
let snapshotData;
111112

112113
if (this.options.adminToken) {
114+
this.log(`Running GC before snapshot...`);
115+
await this.runGc();
116+
113117
// Use the admin API endpoint to get actual V8 heap snapshot
114118
this.log(`Taking V8 heap snapshot via admin API: ${label}...`);
115119

@@ -211,6 +215,52 @@ class MemoryLeakDetector {
211215
});
212216
}
213217

218+
async runGc() {
219+
return new Promise((resolve, reject) => {
220+
const requestOptions = {
221+
hostname: "localhost",
222+
port: this.options.serverPort,
223+
path: "/admin/api/v1/gc",
224+
method: "GET",
225+
timeout: 120000, // GC can take a while
226+
headers: {
227+
Authorization: `Bearer ${this.options.adminToken}`,
228+
"User-Agent": "memory-leak-detector/1.0",
229+
},
230+
};
231+
232+
const req = http.request(requestOptions, (res) => {
233+
if (res.statusCode !== 200) {
234+
reject(new Error(`Admin API returned ${res.statusCode}: ${res.statusMessage}`));
235+
return;
236+
}
237+
238+
res.on("data", (chunk) => {
239+
this.log(`GC run completed`);
240+
});
241+
242+
res.on("end", () => {
243+
resolve();
244+
});
245+
246+
res.on("error", (error) => {
247+
reject(error);
248+
});
249+
});
250+
251+
req.on("error", (error) => {
252+
reject(new Error(`Failed to connect to admin API: ${error.message}`));
253+
});
254+
255+
req.on("timeout", () => {
256+
req.destroy();
257+
reject(new Error("Admin API request timeout"));
258+
});
259+
260+
req.end();
261+
});
262+
}
263+
214264
async startServer() {
215265
return new Promise((resolve, reject) => {
216266
this.log("Starting server...");
@@ -234,18 +284,22 @@ class MemoryLeakDetector {
234284
this.log(`Using NODE_PATH: ${nodePath}`);
235285

236286
// Start the server with memory inspection flags
237-
this.serverProcess = spawn("node", ["--max-old-space-size=16384", "--expose-gc", "./build/server.js"], {
238-
cwd: process.cwd(),
239-
stdio: ["ignore", "pipe", "pipe"],
240-
env: {
241-
...process.env,
242-
NODE_ENV: "production",
243-
NODE_PATH: nodePath,
244-
PORT: this.options.serverPort,
245-
// Disable Sentry to prevent memory leaks from it
246-
SENTRY_DSN: this.options.sentryDsn,
247-
},
248-
});
287+
this.serverProcess = spawn(
288+
"node",
289+
["--max-old-space-size=16384", "--expose-gc", "./build/server.js"],
290+
{
291+
cwd: process.cwd(),
292+
stdio: ["ignore", "pipe", "pipe"],
293+
env: {
294+
...process.env,
295+
NODE_ENV: "production",
296+
NODE_PATH: nodePath,
297+
PORT: this.options.serverPort,
298+
// Disable Sentry to prevent memory leaks from it
299+
SENTRY_DSN: this.options.sentryDsn,
300+
},
301+
}
302+
);
249303

250304
let serverReady = false;
251305
const timeout = setTimeout(() => {
@@ -346,7 +400,7 @@ class MemoryLeakDetector {
346400
}
347401

348402
async performRequestPhase(phaseName, numRequests) {
349-
this.log(`Starting ${phaseName} phase with ${numRequests} requests...`);
403+
this.log(`Starting ${phaseName} phase with ${numRequests} GET requests...`);
350404

351405
const startTime = performance.now();
352406
let successfulRequests = 0;
@@ -390,6 +444,58 @@ class MemoryLeakDetector {
390444
}
391445
}
392446

447+
this.log(`Continuing with ${phaseName} phase with ${numRequests} POST requests...`);
448+
449+
for (let i = 0; i < numRequests; i++) {
450+
const endpoint = this.options.postApiEndpoints[i % this.options.postApiEndpoints.length];
451+
452+
try {
453+
// Send a LARGE body to try and trigger a memory leak
454+
const response = await this.makeRequest(endpoint, {
455+
method: "POST",
456+
body: JSON.stringify(
457+
Array.from({ length: 1000 }, (_, index) => ({
458+
id: index,
459+
name: `Mock ${index}`,
460+
description: `Mock ${index} description`,
461+
createdAt: new Date().toISOString(),
462+
updatedAt: new Date().toISOString(),
463+
uuid: crypto.randomUUID(),
464+
}))
465+
),
466+
});
467+
468+
if (response.statusCode >= 200 && response.statusCode < 300) {
469+
successfulRequests++;
470+
} else {
471+
failedRequests++;
472+
errors.push({
473+
endpoint,
474+
statusCode: response.statusCode,
475+
error: `HTTP ${response.statusCode}`,
476+
phase: phaseName,
477+
});
478+
}
479+
} catch (error) {
480+
failedRequests++;
481+
errors.push({
482+
endpoint: error.endpoint,
483+
error: error.error?.message || "Unknown error",
484+
phase: phaseName,
485+
});
486+
}
487+
488+
// Add delay between requests
489+
if (i < numRequests - 1) {
490+
await this.delay(this.options.requestDelay);
491+
}
492+
493+
// Progress reporting
494+
if (this.options.verbose && numRequests > 25 && (i + 1) % 25 === 0) {
495+
this.log(`${phaseName}: ${i + 1}/${numRequests} requests completed`);
496+
}
497+
}
498+
393499
const endTime = performance.now();
394500
const duration = endTime - startTime;
395501

0 commit comments

Comments (0)