
Commit 67dd7e4

test: fix circuit breaker close after shutdown
1 parent b640ed3 commit 67dd7e4

File tree

3 files changed: +25 -23 lines changed


README.md

Lines changed: 4 additions & 2 deletions
@@ -1,6 +1,6 @@
 # ResilientLLM
 
-A robust LLM integration layer designed to ensure reliable, seamless interactions across multiple APIs by intelligently handling failures and rate limits.
+A simple but robust LLM integration layer designed to ensure reliable, seamless interactions across multiple APIs by intelligently handling failures and rate limits.
 
 ## Motivation
 
@@ -34,7 +34,9 @@ const llm = new ResilientLLM({
   rateLimitConfig: {
     requestsPerMinute: 60, // Limit to 60 requests per minute
     llmTokensPerMinute: 90000 // Limit to 90,000 LLM tokens per minute
-  }
+  },
+  retries: 3,
+  backoffFactor: 2
 });
 
 const conversationHistory = [
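
With retries: 3 and backoffFactor: 2, the retry loop in ResilientOperation.js (changed in this same commit, below) waits 1000 ms before the first retry and multiplies the delay by the backoff factor after each failure. A minimal sketch of the resulting wait schedule, assuming the 1000 ms initial delay shown in _executeBasic; the backoffSchedule helper is illustrative only, not part of the library:

// Illustrative only: reproduces the delay progression of _executeBasic
// (initial delay 1000 ms, multiplied by backoffFactor after each failed attempt).
function backoffSchedule(retries, backoffFactor, initialDelayMs = 1000) {
  const delays = [];
  let delay = initialDelayMs;
  for (let retryAttempt = 0; retryAttempt < retries; retryAttempt++) {
    delays.push(delay);
    delay *= backoffFactor;
  }
  return delays;
}

console.log(backoffSchedule(3, 2)); // [1000, 2000, 4000], the waits before retries 1, 2 and 3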

ResilientOperation.js

Lines changed: 9 additions & 14 deletions
@@ -244,10 +244,10 @@ class ResilientOperation {
    */
   async _executeBasic(asyncFn, config, ...args) {
 
-    let attempt = 0;
+    let retryAttempt = 0;
     let delay = 1000;
 
-    while (attempt <= config.retries) {
+    while (retryAttempt <= config.retries) {
       try {
         // Check circuit breaker first
         if (this.circuitBreaker.isCircuitOpen()) {
@@ -275,13 +275,8 @@ class ResilientOperation {
         this.circuitBreaker.recordSuccess();
 
         // Log success with retry information
-        if (attempt > 0) {
-          const status = this.circuitBreaker.getStatus();
-          console.log(`[ResilientOperation][${this.id}] Operation succeeded after ${attempt} retries. Current fail count: ${status.failCount}/${status.failureThreshold}`);
-        } else {
-          const status = this.circuitBreaker.getStatus();
-          console.log(`[ResilientOperation][${this.id}] Operation succeeded on first attempt. Current fail count: ${status.failCount}/${status.failureThreshold}`);
-        }
+        const status = this.circuitBreaker.getStatus();
+        console.log(`[ResilientOperation][${this.id}] Operation succeeded after ${retryAttempt} retries. Current fail count: ${status.failCount}/${status.failureThreshold}`);
 
         return result;
       } catch (err) {
@@ -303,17 +298,17 @@ class ResilientOperation {
         this.circuitBreaker.recordFailure();
 
         // Log retry attempt with circuit breaker status
-        const remainingRetries = config.retries - attempt;
+        const remainingRetries = config.retries - retryAttempt;
         const status = this.circuitBreaker.getStatus();
 
-        console.log(`[ResilientOperation][${this.id}] Attempt ${attempt + 1} failed: ${err.message}. Retries remaining: ${remainingRetries}. Circuit breaker fail count: ${status.failCount}/${status.failureThreshold}`);
+        console.log(`[ResilientOperation][${this.id}] Attempt ${retryAttempt + 1} failed: ${err.message}. Retries remaining: ${remainingRetries}. Circuit breaker fail count: ${status.failCount}/${status.failureThreshold}`);
         if(status?.isOpen) {
           console.log(`[ResilientOperation][${this.id}] Circuit breaker is open. Cooldown remaining: ${status.cooldownRemaining}ms`);
         }
 
-        if (!this._shouldRetry(err) || attempt >= config.retries) {
+        if (!this._shouldRetry(err) || retryAttempt >= config.retries) {
           // Log final failure - this operation has exhausted all retries
-          console.log(`[ResilientOperation][${this.id}] Operation failed after ${attempt + 1} attempts. Circuit breaker fail count: ${status.failCount}/${status.failureThreshold}`);
+          console.log(`[ResilientOperation][${this.id}] Operation failed after ${retryAttempt + 1} attempts. Circuit breaker fail count: ${status.failCount}/${status.failureThreshold}`);
           throw err;
         }
 
@@ -323,7 +318,7 @@ class ResilientOperation {
         this.nextRetryDelay = null;
         await sleep(waitTime, this._abortController.signal);
         delay *= config.backoffFactor;
-        attempt++;
+        retryAttempt++;
       }
     }
     console.log(`[ResilientOperation][${this.id}] Exiting execution attempt loop`);
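
The rename from attempt to retryAttempt and the collapsed success logging leave the control flow unchanged. For readers skimming the hunks, here is a condensed, self-contained sketch of the loop as it reads after this commit; circuitBreaker, shouldRetry and the plain setTimeout sleep are stand-ins for ResilientOperation's real collaborators, so this is an illustration rather than the library's actual code:

// Condensed sketch of the retry loop above; the stubbed collaborators only
// approximate ResilientOperation's real circuit breaker, sleep and retry policy.
async function executeWithRetries(asyncFn, config, circuitBreaker, shouldRetry, ...args) {
  let retryAttempt = 0;
  let delay = 1000;

  while (retryAttempt <= config.retries) {
    try {
      // Check circuit breaker first
      if (circuitBreaker.isCircuitOpen()) {
        throw new Error('Circuit breaker is open');
      }
      const result = await asyncFn(...args);
      circuitBreaker.recordSuccess();
      // A single log line now covers both first-try and after-retry success.
      console.log(`Operation succeeded after ${retryAttempt} retries.`);
      return result;
    } catch (err) {
      circuitBreaker.recordFailure();
      if (!shouldRetry(err) || retryAttempt >= config.retries) {
        console.log(`Operation failed after ${retryAttempt + 1} attempts.`);
        throw err;
      }
      await new Promise(resolve => setTimeout(resolve, delay)); // simplified sleep
      delay *= config.backoffFactor;
      retryAttempt++;
    }
  }
}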

test/resilient-operation.e2e.test.js

Lines changed: 12 additions & 7 deletions
@@ -234,40 +234,45 @@ describe('ResilientOperation E2E Tests', () => {
   }).timeout(50000);
 
   // Circuit breaker close test
-  it('should close circuit breaker after cooldown period', async () => {
+  it.only('should close circuit breaker after cooldown period', async () => {
     // Create a ResilientOperation with short cooldown for testing
     const testResilientOp = new ResilientOperation({
       bucketId: 'cooldown-test',
       circuitBreakerConfig: { failureThreshold: 3, cooldownPeriod: 3000 }, // 3 second cooldown
       retries: 0, // Disable retries to see pure circuit breaker behavior
     });
 
-    const mockAsyncFn = sinon.stub().callsFake(async () => {
+    const failingMocAsyncFn = sinon.stub().callsFake(async () => {
       const error = new Error('Service down');
       error.response = { status: 500 };
       throw error;
     });
 
     // Make enough calls to open the circuit breaker
     const promises = [];
-    for (let i = 0; i < 4; i++) {
-      promises.push(testResilientOp.execute(mockAsyncFn).catch(err => err));
+    for (let i = 0; i < 3; i++) {
+      promises.push(testResilientOp.execute(failingMocAsyncFn).catch(err => err));
     }
 
     await Promise.all(promises);
 
     // Circuit breaker should be open
     let status = testResilientOp.circuitBreaker.getStatus();
     expect(status.isOpen).to.be.true;
-    expect(status.failCount).to.be.at.least(3);
+    expect(status.failCount).to.be.equal(3);
 
     // Wait for cooldown period to expire
-    await new Promise(resolve => setTimeout(resolve, 3100));
+    await new Promise(resolve => setTimeout(resolve, 4100));
+
+    const successMocAsyncFn = sinon.stub().callsFake(async () => {
+      return { data: 'success' };
+    });
+    await testResilientOp.execute(successMocAsyncFn).catch(err => err)
 
     // Circuit breaker should automatically close
     status = testResilientOp.circuitBreaker.getStatus();
     expect(status.isOpen).to.be.false;
-    expect(status.failCount).to.equal(0);
+    expect(status.failCount).to.be.equal(0);
   }).timeout(10000);
 
   // Circuit breaker open test
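
The assertions above encode the expected lifecycle: the breaker opens once failCount reaches failureThreshold (3), stays open for cooldownPeriod (3000 ms), and after the cooldown a single successful call closes it and resets failCount to 0. A minimal sketch of a breaker with that lifecycle and the getStatus() shape used by the test; this is an assumed interface written for illustration, not the project's actual CircuitBreaker implementation:

// Illustrative breaker matching the surface exercised by the test:
// isCircuitOpen(), recordFailure(), recordSuccess(), getStatus().
class SimpleCircuitBreaker {
  constructor({ failureThreshold = 3, cooldownPeriod = 3000 } = {}) {
    this.failureThreshold = failureThreshold;
    this.cooldownPeriod = cooldownPeriod;
    this.failCount = 0;
    this.openedAt = null;
  }

  isCircuitOpen() {
    if (this.openedAt === null) return false;
    // Once the cooldown expires, let a trial call through (half-open behaviour).
    return Date.now() - this.openedAt < this.cooldownPeriod;
  }

  recordFailure() {
    this.failCount++;
    if (this.failCount >= this.failureThreshold) {
      this.openedAt = Date.now();
    }
  }

  recordSuccess() {
    // A success after the cooldown closes the breaker and resets the count.
    this.failCount = 0;
    this.openedAt = null;
  }

  getStatus() {
    const isOpen = this.isCircuitOpen();
    return {
      isOpen,
      failCount: this.failCount,
      failureThreshold: this.failureThreshold,
      cooldownRemaining: isOpen ? this.cooldownPeriod - (Date.now() - this.openedAt) : 0,
    };
  }
}

Against this sketch, the test's sequence of three failing calls, a 4100 ms wait, and one successful call ends with isOpen false and failCount 0, matching the final expectations.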
