Skip to content

Commit c7ba971

Browse files
committed
fix: resolve linting errors and format code
- Fix strict-boolean-expressions for nullable config checks
- Fix nullable string check in QueueFlushingPlugin
- Run prettier on all modified files
1 parent 797cab9 commit c7ba971

File tree

6 files changed

+47
-24
lines changed

6 files changed

+47
-24
lines changed

packages/core/src/backoff/BackoffManager.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,9 @@ export class BackoffManager {
159159
const nextRetryTime = now + backoffSeconds * 1000;
160160

161161
this.logger?.info(
162-
`Transient error (${statusCode}): backoff ${backoffSeconds.toFixed(1)}s, attempt ${newRetryCount}/${this.config.maxRetryCount}`
162+
`Transient error (${statusCode}): backoff ${backoffSeconds.toFixed(
163+
1
164+
)}s, attempt ${newRetryCount}/${this.config.maxRetryCount}`
163165
);
164166

165167
return {

packages/core/src/backoff/RetryManager.ts

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,8 @@ export class RetryManager {
9999
}
100100

101101
const waitSeconds = Math.ceil((state.waitUntilTime - now) / 1000);
102-
const stateType = state.state === 'RATE_LIMITED' ? 'rate limited' : 'backing off';
102+
const stateType =
103+
state.state === 'RATE_LIMITED' ? 'rate limited' : 'backing off';
103104
this.logger?.info(
104105
`Upload blocked: ${stateType}, retry in ${waitSeconds}s (retry ${state.retryCount})`
105106
);
@@ -113,7 +114,7 @@ export class RetryManager {
113114
* @param retryAfterSeconds - Delay in seconds from Retry-After header (validated and clamped)
114115
*/
115116
async handle429(retryAfterSeconds: number): Promise<void> {
116-
if (!this.rateLimitConfig?.enabled) {
117+
if (this.rateLimitConfig?.enabled !== true) {
117118
return;
118119
}
119120

@@ -148,7 +149,7 @@ export class RetryManager {
148149
* Uses exponential backoff to calculate wait time.
149150
*/
150151
async handleTransientError(): Promise<void> {
151-
if (!this.backoffConfig?.enabled) {
152+
if (this.backoffConfig?.enabled !== true) {
152153
return;
153154
}
154155

@@ -224,9 +225,12 @@ export class RetryManager {
224225
? Math.max(state.waitUntilTime, waitUntilTime)
225226
: waitUntilTime;
226227

227-
const stateType = newState === 'RATE_LIMITED' ? 'Rate limited (429)' : 'Transient error';
228+
const stateType =
229+
newState === 'RATE_LIMITED' ? 'Rate limited (429)' : 'Transient error';
228230
this.logger?.info(
229-
`${stateType}: waiting ${Math.ceil((finalWaitUntilTime - now) / 1000)}s before retry ${newRetryCount}`
231+
`${stateType}: waiting ${Math.ceil(
232+
(finalWaitUntilTime - now) / 1000
233+
)}s before retry ${newRetryCount}`
230234
);
231235

232236
return {
@@ -246,7 +250,8 @@ export class RetryManager {
246250
return 0;
247251
}
248252

249-
const { baseBackoffInterval, maxBackoffInterval, jitterPercent } = this.backoffConfig;
253+
const { baseBackoffInterval, maxBackoffInterval, jitterPercent } =
254+
this.backoffConfig;
250255

251256
// Base exponential backoff: base * 2^retryCount
252257
const exponentialBackoff = baseBackoffInterval * Math.pow(2, retryCount);

packages/core/src/backoff/UploadStateMachine.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,9 @@ export class UploadStateMachine {
156156
: waitUntilTime;
157157

158158
this.logger?.info(
159-
`Rate limited (429): waiting ${Math.ceil((finalWaitUntilTime - now) / 1000)}s before retry ${newRetryCount}/${this.config.maxRetryCount}`
159+
`Rate limited (429): waiting ${Math.ceil(
160+
(finalWaitUntilTime - now) / 1000
161+
)}s before retry ${newRetryCount}/${this.config.maxRetryCount}`
160162
);
161163

162164
return {

packages/core/src/backoff/__tests__/BackoffManager.test.ts

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -187,11 +187,15 @@ describe('BackoffManager', () => {
187187
expect(await bm.canRetry()).toBe(false);
188188

189189
// Should not be ready 1ms before expected delay
190-
jest.spyOn(Date, 'now').mockReturnValue(currentTime + expectedDelays[i] - 1);
190+
jest
191+
.spyOn(Date, 'now')
192+
.mockReturnValue(currentTime + expectedDelays[i] - 1);
191193
expect(await bm.canRetry()).toBe(false);
192194

193195
// Should be ready at expected delay
194-
jest.spyOn(Date, 'now').mockReturnValue(currentTime + expectedDelays[i]);
196+
jest
197+
.spyOn(Date, 'now')
198+
.mockReturnValue(currentTime + expectedDelays[i]);
195199
expect(await bm.canRetry()).toBe(true);
196200
}
197201
});

packages/core/src/plugins/QueueFlushingPlugin.ts

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,9 @@ export class QueueFlushingPlugin extends UtilityPlugin {
7676
async flush() {
7777
// Safety: prevent concurrent flush operations
7878
if (this.flushPromise) {
79-
this.analytics?.logger.info('Flush already in progress, waiting for completion');
79+
this.analytics?.logger.info(
80+
'Flush already in progress, waiting for completion'
81+
);
8082
await this.flushPromise;
8183
return;
8284
}
@@ -153,7 +155,7 @@ export class QueueFlushingPlugin extends UtilityPlugin {
153155

154156
const idsToRemove = new Set(messageIds);
155157
const filteredEvents = state.events.filter(
156-
(e) => !e.messageId || !idsToRemove.has(e.messageId)
158+
(e) => e.messageId == null || !idsToRemove.has(e.messageId)
157159
);
158160

159161
return { events: filteredEvents };

packages/core/src/plugins/SegmentDestination.ts

Lines changed: 20 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,7 @@ import type { SegmentClient } from '../analytics';
1212
import { DestinationMetadataEnrichment } from './DestinationMetadataEnrichment';
1313
import { QueueFlushingPlugin } from './QueueFlushingPlugin';
1414
import { defaultApiHost, defaultConfig } from '../constants';
15-
import {
16-
translateHTTPError,
17-
classifyError,
18-
parseRetryAfter,
19-
} from '../errors';
15+
import { translateHTTPError, classifyError, parseRetryAfter } from '../errors';
2016
import { RetryManager } from '../backoff/RetryManager';
2117

2218
const MAX_EVENTS_PER_BATCH = 100;
@@ -66,7 +62,9 @@ export class SegmentDestination extends DestinationPlugin {
6662
*/
6763
private async uploadBatch(batch: SegmentEvent[]): Promise<BatchResult> {
6864
const config = this.analytics?.getConfig() ?? defaultConfig;
69-
const messageIds = batch.map((e) => e.messageId).filter((id): id is string => !!id);
65+
const messageIds = batch
66+
.map((e) => e.messageId)
67+
.filter((id): id is string => !!id);
7068

7169
try {
7270
const res = await uploadEvents({
@@ -95,9 +93,12 @@ export class SegmentDestination extends DestinationPlugin {
9593

9694
// Classify error
9795
const classification = classifyError(res.status, {
98-
default4xxBehavior: config.httpConfig?.backoffConfig?.default4xxBehavior,
99-
default5xxBehavior: config.httpConfig?.backoffConfig?.default5xxBehavior,
100-
statusCodeOverrides: config.httpConfig?.backoffConfig?.statusCodeOverrides,
96+
default4xxBehavior:
97+
config.httpConfig?.backoffConfig?.default4xxBehavior,
98+
default5xxBehavior:
99+
config.httpConfig?.backoffConfig?.default5xxBehavior,
100+
statusCodeOverrides:
101+
config.httpConfig?.backoffConfig?.statusCodeOverrides,
101102
rateLimitEnabled: config.httpConfig?.rateLimitConfig?.enabled,
102103
});
103104

@@ -233,7 +234,9 @@ export class SegmentDestination extends DestinationPlugin {
233234

234235
// Handle successes - dequeue
235236
if (aggregation.successfulMessageIds.length > 0) {
236-
await this.queuePlugin.dequeueByMessageIds(aggregation.successfulMessageIds);
237+
await this.queuePlugin.dequeueByMessageIds(
238+
aggregation.successfulMessageIds
239+
);
237240

238241
// Reset retry manager on success
239242
if (this.retryManager) {
@@ -249,7 +252,9 @@ export class SegmentDestination extends DestinationPlugin {
249252

250253
// Handle permanent errors - dequeue (drop)
251254
if (aggregation.permanentErrorMessageIds.length > 0) {
252-
await this.queuePlugin.dequeueByMessageIds(aggregation.permanentErrorMessageIds);
255+
await this.queuePlugin.dequeueByMessageIds(
256+
aggregation.permanentErrorMessageIds
257+
);
253258
this.analytics?.logger.error(
254259
`Dropped ${aggregation.permanentErrorMessageIds.length} events due to permanent errors`
255260
);
@@ -301,7 +306,10 @@ export class SegmentDestination extends DestinationPlugin {
301306
const config = analytics.getConfig();
302307

303308
// Initialize retry manager (handles both 429 rate limiting and transient errors)
304-
if (config.httpConfig?.rateLimitConfig || config.httpConfig?.backoffConfig) {
309+
if (
310+
config.httpConfig?.rateLimitConfig ||
311+
config.httpConfig?.backoffConfig
312+
) {
305313
this.retryManager = new RetryManager(
306314
config.writeKey,
307315
config.storePersistor,

0 commit comments

Comments (0)