
Commit 8b51346

feat(auth): add EntraId integration tests
- Add integration tests for token renewal and re-authentication flows
- Update credentials provider to use uniqueId as username instead of account username
- Add test utilities for loading Redis endpoint configurations
- Split TypeScript configs into separate files for samples and integration tests
1 parent ac972bd · commit 8b51346

14 files changed (+367 -66 lines)
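The EntraID provider change itself is not in the hunks shown below, but the "uniqueId as username" item amounts to mapping the MSAL token result onto basic credentials. A minimal sketch, assuming an @azure/msal-node `AuthenticationResult` and an illustrative `BasicAuth` shape (the actual provider code lives outside this excerpt):

```typescript
// Hypothetical sketch only: the real provider change is in a file not shown in
// this diff. It maps an MSAL AuthenticationResult to basic credentials, using
// uniqueId (the oid/sub claim) rather than account.username.
import type { AuthenticationResult } from '@azure/msal-node';

interface BasicAuth {
  username?: string;
  password: string;
}

function toCredentials(result: AuthenticationResult): BasicAuth {
  return {
    // account?.username (a UPN) can be empty for some account types;
    // uniqueId carries the oid/sub claim from the token itself.
    username: result.uniqueId,
    password: result.accessToken
  };
}
```

Using the object id avoids depending on `account?.username`, which is not guaranteed to match the identity the Redis ACL expects.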

packages/authx/lib/token-manager.spec.ts

Lines changed: 8 additions & 8 deletions
@@ -328,7 +328,7 @@ describe('TokenManager', () => {
       assert.equal(listener.receivedTokens.length, 1, 'Should not receive new token after failure');
       assert.equal(listener.errors.length, 1, 'Should receive error');
       assert.equal(listener.errors[0].message, 'Fatal error', 'Should have correct error message');
-      assert.equal(listener.errors[0].isFatal, true, 'Should be a fatal error');
+      assert.equal(listener.errors[0].isRetryable, false, 'Should be a fatal error');
 
       // verify that the token manager is stopped and no more requests are made after the error and expected refresh time
       await delay(80);
@@ -352,7 +352,7 @@ describe('TokenManager', () => {
         initialDelayMs: 100,
         maxDelayMs: 1000,
         backoffMultiplier: 2,
-        shouldRetry: (error: unknown) => error instanceof Error && error.message === 'Temporary failure'
+        isRetryable: (error: unknown) => error instanceof Error && error.message === 'Temporary failure'
       }
     };
 
@@ -389,7 +389,7 @@ describe('TokenManager', () => {
       // Should have first error but not stop due to retry config
       assert.equal(listener.errors.length, 1, 'Should have first error');
       assert.ok(listener.errors[0].message.includes('attempt 1'), 'Error should indicate first attempt');
-      assert.equal(listener.errors[0].isFatal, false, 'Should not be a fatal error');
+      assert.equal(listener.errors[0].isRetryable, true, 'Should not be a fatal error');
       assert.equal(manager.isRunning(), true, 'Should continue running during retries');
 
       // Advance past first retry (delay: 100ms due to backoff)
@@ -401,7 +401,7 @@ describe('TokenManager', () => {
 
       assert.equal(listener.errors.length, 2, 'Should have second error');
       assert.ok(listener.errors[1].message.includes('attempt 2'), 'Error should indicate second attempt');
-      assert.equal(listener.errors[0].isFatal, false, 'Should not be a fatal error');
+      assert.equal(listener.errors[0].isRetryable, true, 'Should not be a fatal error');
       assert.equal(manager.isRunning(), true, 'Should continue running during retries');
 
       // Advance past second retry (delay: 200ms due to backoff)
@@ -435,7 +435,7 @@ describe('TokenManager', () => {
         maxDelayMs: 1000,
         backoffMultiplier: 2,
         jitterPercentage: 0,
-        shouldRetry: (error: unknown) => error instanceof Error && error.message === 'Temporary failure'
+        isRetryable: (error: unknown) => error instanceof Error && error.message === 'Temporary failure'
       }
     };
 
@@ -470,7 +470,7 @@ describe('TokenManager', () => {
       // First error
      assert.equal(listener.errors.length, 1, 'Should have first error');
       assert.equal(manager.isRunning(), true, 'Should continue running after first error');
-      assert.equal(listener.errors[0].isFatal, false, 'Should not be a fatal error');
+      assert.equal(listener.errors[0].isRetryable, true, 'Should not be a fatal error');
 
       // Advance past first retry
       await delay(100);
@@ -483,7 +483,7 @@ describe('TokenManager', () => {
       // Second error
       assert.equal(listener.errors.length, 2, 'Should have second error');
       assert.equal(manager.isRunning(), true, 'Should continue running after second error');
-      assert.equal(listener.errors[1].isFatal, false, 'Should not be a fatal error');
+      assert.equal(listener.errors[1].isRetryable, true, 'Should not be a fatal error');
 
       // Advance past second retry
       await delay(200);
@@ -495,7 +495,7 @@ describe('TokenManager', () => {
 
       // Should stop after max retries
       assert.equal(listener.errors.length, 3, 'Should have final error');
-      assert.equal(listener.errors[2].isFatal, true, 'Should not be a fatal error');
+      assert.equal(listener.errors[2].isRetryable, false, 'Should be a fatal error');
       assert.equal(manager.isRunning(), false, 'Should stop after max retries exceeded');
       assert.equal(identityProvider.getRequestCount(), 4, 'Should have made exactly 4 requests');
 
packages/authx/lib/token-manager.ts

Lines changed: 70 additions & 21 deletions
@@ -5,18 +5,68 @@ import { Token } from './token';
  * The configuration for retrying token refreshes.
  */
 export interface RetryPolicy {
-  // The maximum number of attempts to retry token refreshes.
+  /**
+   * The maximum number of attempts to retry token refreshes.
+   */
   maxAttempts: number;
-  // The initial delay in milliseconds before the first retry.
+
+  /**
+   * The initial delay in milliseconds before the first retry.
+   */
   initialDelayMs: number;
-  // The maximum delay in milliseconds between retries (the calculated delay will be capped at this value).
+
+  /**
+   * The maximum delay in milliseconds between retries.
+   * The calculated delay will be capped at this value.
+   */
   maxDelayMs: number;
-  // The multiplier for exponential backoff between retries. e.g. 2 will double the delay each time.
+
+  /**
+   * The multiplier for exponential backoff between retries.
+   * @example
+   * A value of 2 will double the delay each time:
+   * - 1st retry: initialDelayMs
+   * - 2nd retry: initialDelayMs * 2
+   * - 3rd retry: initialDelayMs * 4
+   */
   backoffMultiplier: number;
-  // The percentage of jitter to apply to the delay. e.g. 0.1 will add or subtract up to 10% of the delay.
+
+  /**
+   * The percentage of jitter to apply to the delay.
+   * @example
+   * A value of 0.1 will add or subtract up to 10% of the delay.
+   */
   jitterPercentage?: number;
-  // A custom function to determine if a retry should be attempted based on the error and attempt number.
-  shouldRetry?: (error: unknown, attempt: number) => boolean;
+
+  /**
+   * Function to classify errors from the identity provider as retryable or non-retryable.
+   * Used to determine if a token refresh failure should be retried based on the type of error.
+   *
+   * Common use cases:
+   * - Network errors that may be transient (should retry)
+   * - Invalid credentials (should not retry)
+   * - Rate limiting responses (should retry)
+   *
+   * @param error - The error from the identity provider
+   * @param attempt - Current retry attempt (0-based)
+   * @returns `true` if the error is considered transient and the operation should be retried
+   *
+   * @example
+   * ```typescript
+   * const retryPolicy: RetryPolicy = {
+   *   maxAttempts: 3,
+   *   initialDelayMs: 1000,
+   *   maxDelayMs: 5000,
+   *   backoffMultiplier: 2,
+   *   isRetryable: (error) => {
+   *     // Retry on network errors or rate limiting
+   *     return error instanceof NetworkError ||
+   *            error instanceof RateLimitError;
+   *   }
+   * };
+   * ```
+   */
+  isRetryable?: (error: unknown, attempt: number) => boolean;
 }
 
 /**
@@ -36,14 +86,13 @@ export interface TokenManagerConfig {
 }
 
 /**
- * IDPError is an error that occurs while calling the underlying IdentityProvider.
+ * IDPError indicates a failure from the identity provider.
  *
- * It can be transient and if retry policy is configured, the token manager will attempt to obtain a token again.
- * This means that receiving non-fatal error is not a stream termination event.
- * The stream will be terminated only if the error is fatal.
+ * The `isRetryable` flag is determined by the RetryPolicy's error classification function - if an error is
+ * classified as retryable, it will be marked as transient and the token manager will attempt to recover.
  */
 export class IDPError extends Error {
-  constructor(public readonly message: string, public readonly isFatal: boolean) {
+  constructor(public readonly message: string, public readonly isRetryable: boolean) {
     super(message);
     this.name = 'IDPError';
   }
@@ -105,7 +154,6 @@ export class TokenManager<T> {
    */
   public start(listener: TokenStreamListener<T>, initialDelayMs: number = 0): Disposable {
     if (this.listener) {
-      console.log('TokenManager is already running, stopping the previous instance');
       this.stop();
     }
 
@@ -142,14 +190,14 @@ export class TokenManager<T> {
   private shouldRetry(error: unknown): boolean {
     if (!this.config.retry) return false;
 
-    const { maxAttempts, shouldRetry } = this.config.retry;
+    const { maxAttempts, isRetryable } = this.config.retry;
 
     if (this.retryAttempt >= maxAttempts) {
       return false;
     }
 
-    if (shouldRetry) {
-      return shouldRetry(error, this.retryAttempt);
+    if (isRetryable) {
+      return isRetryable(error, this.retryAttempt);
     }
 
     return false;
@@ -172,16 +220,16 @@ export class TokenManager<T> {
       if (this.shouldRetry(error)) {
         this.retryAttempt++;
         const retryDelay = this.calculateRetryDelay();
-        this.notifyError(`Token refresh failed (attempt ${this.retryAttempt}), retrying in ${retryDelay}ms: ${error}`, false)
+        this.notifyError(`Token refresh failed (attempt ${this.retryAttempt}), retrying in ${retryDelay}ms: ${error}`, true)
         this.scheduleNextRefresh(retryDelay);
       } else {
-        this.notifyError(error, true);
+        this.notifyError(error, false);
         this.stop();
       }
     }
   }
 
-  private handleNewToken = async ({ token: nativeToken, ttlMs }: TokenResponse<T>): Promise<void> => {
+  private async handleNewToken({ token: nativeToken, ttlMs }: TokenResponse<T>): Promise<void> {
     if (!this.listener) {
       throw new Error('TokenManager is not running, but a new token was received');
     }
@@ -255,13 +303,14 @@ export class TokenManager<T> {
     return this.currentToken;
   }
 
-  private notifyError = (error: unknown, isFatal: boolean): void => {
+
+  private notifyError(error: unknown, isRetryable: boolean): void {
     const errorMessage = error instanceof Error ? error.message : String(error);
 
     if (!this.listener) {
       throw new Error(`TokenManager is not running but received an error: ${errorMessage}`);
     }
 
-    this.listener.onError(new IDPError(errorMessage, isFatal));
+    this.listener.onError(new IDPError(errorMessage, isRetryable));
   }
 }
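Downstream of the rename, consumers branch on `isRetryable` rather than negating `isFatal`. A sketch of a listener reacting to the new flag, assuming an object-literal listener shape (the real `TokenStreamListener<T>` interface is not shown in this diff):

```typescript
// Listener shape here is illustrative; only IDPError and its isRetryable flag
// come from the code above.
import { IDPError } from './token-manager';

const listener = {
  onNext: (token: unknown): void => {
    // Re-authenticate the connection with the refreshed token.
  },
  onError: (error: IDPError): void => {
    if (error.isRetryable) {
      // Transient: the TokenManager stays running and will retry on its own,
      // so this is informational.
      console.warn(`token refresh failed, retrying: ${error.message}`);
    } else {
      // Non-retryable: the manager has stopped; the application must recover,
      // e.g. rebuild the provider or surface the failure.
      console.error(`token refresh failed permanently: ${error.message}`);
    }
  }
};
```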

packages/client/lib/client/index.ts

Lines changed: 0 additions & 2 deletions
@@ -399,14 +399,12 @@ export default class RedisClient<
       onNext: credentials => {
         this.reAuthenticate(credentials).catch(error => {
           const errorMessage = error instanceof Error ? error.message : String(error);
-          console.error('Error during re-authentication', errorMessage);
           cp.onReAuthenticationError(new CredentialsError(errorMessage));
         });
 
       },
       onError: (e: Error) => {
         const errorMessage = `Error from streaming credentials provider: ${e.message}`;
-        console.error(errorMessage);
         cp.onReAuthenticationError(new UnableToObtainNewCredentialsError(errorMessage));
       }
     });
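With the `console.error` calls gone, re-authentication failures reach the application only via the provider's `onReAuthenticationError` hook (the `cp` calls kept above). A sketch of a handler an application-supplied provider might implement; the logging choice is illustrative, not part of the commit:

```typescript
// Sketch of an application-side handler. Only the onReAuthenticationError hook
// and the two error types appear in the diff; everything else is illustrative.
function onReAuthenticationError(error: Error): void {
  // CredentialsError: reAuthenticate() rejected with the new credentials.
  // UnableToObtainNewCredentialsError: the credentials stream itself errored.
  console.error(`[redis-auth] ${error.name}: ${error.message}`);
}
```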
